Loading libraries (1)
#Loading the EDA libraries
library(tools)
library(dplyr)
## Warning: package 'dplyr' was built under R version 3.5.3
library(tidyverse)
## Warning: package 'tidyverse' was built under R version 3.5.3
## Warning: package 'ggplot2' was built under R version 3.5.3
## Warning: package 'tibble' was built under R version 3.5.3
## Warning: package 'tidyr' was built under R version 3.5.3
## Warning: package 'readr' was built under R version 3.5.3
## Warning: package 'purrr' was built under R version 3.5.3
## Warning: package 'stringr' was built under R version 3.5.3
## Warning: package 'forcats' was built under R version 3.5.3
library(Hmisc)
## Warning: package 'Hmisc' was built under R version 3.5.3
## Warning: package 'survival' was built under R version 3.5.3
library(xlsx)
library(ggplot2)
library(gridExtra)
## Warning: package 'gridExtra' was built under R version 3.5.3
library(reshape2)
## Warning: package 'reshape2' was built under R version 3.5.3
library(tidyr)
library(psych)
## Warning: package 'psych' was built under R version 3.5.3
library(caret)
## Warning: package 'caret' was built under R version 3.5.3
library(readr)
library(car)
## Warning: package 'car' was built under R version 3.5.3
library(purrr)
library(data.table)
## Warning: package 'data.table' was built under R version 3.5.3
library(fastDummies)
## Warning: package 'fastDummies' was built under R version 3.5.3
#Loading data imputation libraries
library(mice)
## Warning: package 'mice' was built under R version 3.5.3
library(VIM)
## Warning: package 'VIM' was built under R version 3.5.3
## Warning: package 'colorspace' was built under R version 3.5.3
library(outliers)
library(DMwR)
## Warning: package 'DMwR' was built under R version 3.5.3
#Loading the modelling libraries
library(e1071)
## Warning: package 'e1071' was built under R version 3.5.3
library(ipred)
## Warning: package 'ipred' was built under R version 3.5.3
library(RWeka)
## Warning: package 'RWeka' was built under R version 3.5.3
library(rpart)
## Warning: package 'rpart' was built under R version 3.5.3
library(rpart.plot)
## Warning: package 'rpart.plot' was built under R version 3.5.3
library(InformationValue)
## Warning: package 'InformationValue' was built under R version 3.5.3
library(class)
library(MLmetrics)
## Warning: package 'MLmetrics' was built under R version 3.5.3
library(randomForest)
## Warning: package 'randomForest' was built under R version 3.5.3
library(party)
## Warning: package 'party' was built under R version 3.5.3
## Warning: package 'mvtnorm' was built under R version 3.5.3
## Warning: package 'strucchange' was built under R version 3.5.3
## Warning: package 'zoo' was built under R version 3.5.3
## Warning: package 'sandwich' was built under R version 3.5.3
library(dlookr)
## Warning: package 'dlookr' was built under R version 3.5.3
library(rpart)
library(rattle)
## Warning: package 'rattle' was built under R version 3.5.3
library(DescTools)
## Warning: package 'DescTools' was built under R version 3.5.3
library(lmtest)
## Warning: package 'lmtest' was built under R version 3.5.3
library(xgboost)
## Warning: package 'xgboost' was built under R version 3.5.3
library(Matrix)
## Warning: package 'Matrix' was built under R version 3.5.3
library(DiagrammeR)
## Warning: package 'DiagrammeR' was built under R version 3.5.3
library(partykit)
## Warning: package 'partykit' was built under R version 3.5.3
## Warning: package 'libcoin' was built under R version 3.5.3
library(MLeval)
## Warning: package 'MLeval' was built under R version 3.5.3
library(doSNOW)
## Warning: package 'doSNOW' was built under R version 3.5.3
## Warning: package 'foreach' was built under R version 3.5.3
## Warning: package 'iterators' was built under R version 3.5.3
## Warning: package 'snow' was built under R version 3.5.3
library(parallel)
File loading and basic exploration (2)
# Path to the coronary heart disease study dataset (CSV).
fName <- paste0(
  "C:/BigData/BABI/Capstone/",
  "Coronory Heart Risk Study/Coronary_heart_risk_study.csv"
)
# Load a dataset from a csv / txt / xlsx file, dispatching on the extension.
#
# Args:
#   fileName   - path to the data file.
#   sheetName1 - sheet name, used only for xlsx input.
# Returns: a data.frame with the file contents.
# Fix: the original body read the global `fName` instead of the `fileName`
# argument, so the parameter was silently ignored; it also fell through with
# an obscure "object 'data' not found" error on unsupported extensions.
loaddata <- function(fileName, sheetName1) {
  ext <- file_ext(fileName)
  if (ext == "csv") {
    # Treat empty strings as missing values in addition to literal "NA".
    data <- read.csv(fileName, header = TRUE, na.strings = c("", "NA"))
  } else if (ext == "txt") {
    # read.csv2: semicolon separator, comma decimal mark.
    data <- read.csv2(fileName)
  } else if (ext == "xlsx") {
    # Requires the xlsx package (loaded at the top of the file).
    data <- read.xlsx(fileName, sheetName = sheetName1,
                      as.data.frame = TRUE, header = TRUE)
  } else {
    stop("Unsupported file extension: ", ext, call. = FALSE)
  }
  return(data)
}
# Read the study data, then shorten the three longest column names in one pass.
data <- loaddata(fName, "")
data <- dplyr::rename(
  data,
  prevStroke = prevalentStroke,
  prevHyp    = prevalentHyp,
  curSmoker  = currentSmoker
)
# Count NAs per column and plot them as a horizontal bar chart, worst first.
# Fix: funs() is soft-deprecated since dplyr 0.8.0 — use a lambda instead,
# which also silences the deprecation warning the old code emitted.
data %>%
summarise_all(~ sum(is.na(.))) %>%
gather %>%
ggplot(aes(x = reorder(key, value), y = value)) + geom_bar(stat = "identity",fill="steelblue",color="steelblue") +
coord_flip() +
xlab("Variables") +
ylab("Initial absolute number of missings")

# List the numeric columns (here: all 16, since every variable is coded numerically).
data %>%
select_if(is.numeric) %>% names
## [1] "male" "age" "education" "curSmoker" "cigsPerDay"
## [6] "BPMeds" "prevStroke" "prevHyp" "diabetes" "totChol"
## [11] "sysBP" "diaBP" "BMI" "heartRate" "glucose"
## [16] "TenYearCHD"
# Histogram of every numeric variable, free scales, three rows of facets.
data %>%
select_if(is.numeric) %>%
gather %>%
ggplot(aes(x = value)) + facet_wrap(~ key, scales = "free", nrow = 3) +
geom_histogram()
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Warning: Removed 645 rows containing non-finite values (stat_bin).

# Pairwise correlations of the numeric columns.
# NOTE(review): cor() defaults to use = "everything", so every column that
# contains NAs produces NA correlations (visible in the output below);
# consider use = "pairwise.complete.obs" — confirm against project intent.
data %>%
select_if(is.numeric) %>%
cor
## male age education curSmoker cigsPerDay
## male 1.000000000 -0.02901358 NA 0.19702562 NA
## age -0.029013582 1.00000000 NA -0.21366166 NA
## education NA NA 1 NA NA
## curSmoker 0.197025619 -0.21366166 NA 1.00000000 NA
## cigsPerDay NA NA NA NA 1
## BPMeds NA NA NA NA NA
## prevStroke -0.004550399 0.05767861 NA -0.03298039 NA
## prevHyp 0.005852836 0.30679947 NA -0.10371030 NA
## diabetes 0.015693075 0.10131408 NA -0.04428530 NA
## totChol NA NA NA NA NA
## sysBP -0.035879033 0.39405332 NA -0.13028149 NA
## diaBP 0.058199421 0.20558552 NA -0.10793319 NA
## BMI NA NA NA NA NA
## heartRate NA NA NA NA NA
## glucose NA NA NA NA NA
## TenYearCHD 0.088373572 0.22540774 NA 0.01944850 NA
## BPMeds prevStroke prevHyp diabetes totChol
## male NA -0.004550399 0.005852836 0.015693075 NA
## age NA 0.057678613 0.306799467 0.101314077 NA
## education NA NA NA NA NA
## curSmoker NA -0.032980386 -0.103710297 -0.044285298 NA
## cigsPerDay NA NA NA NA NA
## BPMeds 1 NA NA NA NA
## prevStroke NA 1.000000000 0.074791128 0.006955094 NA
## prevHyp NA 0.074791128 1.000000000 0.077752047 NA
## diabetes NA 0.006955094 0.077752047 1.000000000 NA
## totChol NA NA NA NA 1
## sysBP NA 0.056999937 0.696655883 0.111264543 NA
## diaBP NA 0.045153466 0.615840200 0.050260378 NA
## BMI NA NA NA NA NA
## heartRate NA NA NA NA NA
## glucose NA NA NA NA NA
## TenYearCHD NA 0.061822628 0.177457561 0.097344236 NA
## sysBP diaBP BMI heartRate glucose TenYearCHD
## male -0.03587903 0.05819942 NA NA NA 0.08837357
## age 0.39405332 0.20558552 NA NA NA 0.22540774
## education NA NA NA NA NA NA
## curSmoker -0.13028149 -0.10793319 NA NA NA 0.01944850
## cigsPerDay NA NA NA NA NA NA
## BPMeds NA NA NA NA NA NA
## prevStroke 0.05699994 0.04515347 NA NA NA 0.06182263
## prevHyp 0.69665588 0.61584020 NA NA NA 0.17745756
## diabetes 0.11126454 0.05026038 NA NA NA 0.09734424
## totChol NA NA NA NA NA NA
## sysBP 1.00000000 0.78395196 NA NA NA 0.21637383
## diaBP 0.78395196 1.00000000 NA NA NA 0.14511159
## BMI NA NA 1 NA NA NA
## heartRate NA NA NA 1 NA NA
## glucose NA NA NA NA 1 NA
## TenYearCHD 0.21637383 0.14511159 NA NA NA 1.00000000
# One boxplot per numeric variable (free scales) to eyeball outliers.
data %>%
select_if(is.numeric) %>%
gather %>%
ggplot(aes(x = 1, y = value)) + facet_wrap(~ key, scales = "free") +
geom_boxplot(color="steelblue") +
ylab("Value") +
xlab("Variable")
## Warning: Removed 645 rows containing non-finite values (stat_boxplot).

# Bar chart of the raw value distribution of every column.
data %>%
#select_if(negate(is.numeric)) %>%
#select(-matches("essay")) %>%
gather %>%
ggplot(aes(x = value)) + geom_bar(fill="steelblue",color="steelblue") +
facet_wrap(~ key, scales = "free", ncol = 3)
## Warning: Removed 645 rows containing non-finite values (stat_count).

# Per-column five-number summary plus mean and NA count (output below).
data %>%
select_if(is.numeric) %>%
map(summary)
## $male
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.0000 0.0000 0.0000 0.4292 1.0000 1.0000
##
## $age
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 32.00 42.00 49.00 49.58 56.00 70.00
##
## $education
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 1.000 1.000 2.000 1.979 3.000 4.000 105
##
## $curSmoker
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.0000 0.0000 0.0000 0.4941 1.0000 1.0000
##
## $cigsPerDay
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 0.000 0.000 0.000 9.006 20.000 70.000 29
##
## $BPMeds
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 0.00000 0.00000 0.00000 0.02962 0.00000 1.00000 53
##
## $prevStroke
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.000000 0.000000 0.000000 0.005896 0.000000 1.000000
##
## $prevHyp
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.0000 0.0000 0.0000 0.3106 1.0000 1.0000
##
## $diabetes
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.00000 0.00000 0.00000 0.02571 0.00000 1.00000
##
## $totChol
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 107.0 206.0 234.0 236.7 263.0 696.0 50
##
## $sysBP
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 83.5 117.0 128.0 132.4 144.0 295.0
##
## $diaBP
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 48.0 75.0 82.0 82.9 90.0 142.5
##
## $BMI
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 15.54 23.07 25.40 25.80 28.04 56.80 19
##
## $heartRate
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 44.00 68.00 75.00 75.88 83.00 143.00 1
##
## $glucose
## Min. 1st Qu. Median Mean 3rd Qu. Max. NA's
## 40.00 71.00 78.00 81.96 87.00 394.00 388
##
## $TenYearCHD
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.0000 0.0000 0.0000 0.1519 0.0000 1.0000
#missingFrame <- apply(is.na(data), 2, which)
#missingFrame$glucose
#which(complete.cases(data) == FALSE)
# Total number of missing cells across the whole dataset.
sum(is.na(data))
## [1] 645
# Correlation heatmap (psych::cor.plot) of the numeric columns, with values.
data %>%
select_if(is.numeric) %>%
cor.plot(numbers = TRUE, main="Correlation Plot of CHD Dataset",diag=FALSE)

# Kaiser-Meyer-Olkin sampling adequacy; overall MSA = 0.69 per the output below.
KMO(data)
## Kaiser-Meyer-Olkin factor adequacy
## Call: KMO(r = data)
## Overall MSA = 0.69
## MSA for each item =
## male age education curSmoker cigsPerDay BPMeds
## 0.54 0.73 0.65 0.57 0.55 0.86
## prevStroke prevHyp diabetes totChol sysBP diaBP
## 0.70 0.87 0.55 0.77 0.71 0.72
## BMI heartRate glucose TenYearCHD
## 0.85 0.71 0.55 0.83
# Row-wise percentages: CHD incidence within each diabetes status.
prop.table(table(data$diabetes,data$TenYearCHD),1)*100
##
## 0 1
## 0 85.37884 14.62116
## 1 63.30275 36.69725
# Row-wise percentages: CHD incidence by prevalent-stroke status.
prop.table(table(data$prevStroke,data$TenYearCHD),1)*100
##
## 0 1
## 0 84.98221 15.01779
## 1 56.00000 44.00000
# Row-wise percentages: CHD incidence by prevalent hypertension.
prop.table(table(data$prevHyp,data$TenYearCHD),1)*100
##
## 0 1
## 0 89.08655 10.91345
## 1 75.32270 24.67730
# Row-wise percentages: CHD incidence by BP-medication use.
prop.table(table(data$BPMeds,data$TenYearCHD),1)*100
##
## 0 1
## 0 85.42949 14.57051
## 1 66.93548 33.06452
Exploratory data analysis
# Work on a copy so the raw numeric coding in `data` stays untouched.
data1 <- data

# Recode a 0/1 indicator to descriptive character labels; values outside
# the levels (and NA) become NA, exactly like the factor round-trip it replaces.
recodeBin <- function(x, lab0, lab1) {
  as.character(factor(x, levels = c("0", "1"), labels = c(lab0, lab1)))
}

# education keeps its 1-4 codes, but converted to character (NA preserved).
data1$education <- as.character(
  factor(data1$education,
         levels = c("1", "2", "3", "4"),
         labels = c("1", "2", "3", "4")))

data1$male       <- recodeBin(data1$male, "Female", "Male")
data1$TenYearCHD <- recodeBin(data1$TenYearCHD, "No", "Yes")
data1$BPMeds     <- recodeBin(data1$BPMeds, "No", "Yes")
data1$prevStroke <- recodeBin(data1$prevStroke, "No", "Yes")
data1$prevHyp    <- recodeBin(data1$prevHyp, "No", "Yes")
data1$diabetes   <- recodeBin(data1$diabetes, "No", "Yes")
data1$curSmoker  <- recodeBin(data1$curSmoker, "No", "Yes")
# Bucket age into decades; the top bucket is 70+.
# Fix: the original labelled the last interval "55to65", which does not match
# the (70, Inf] break it names.
cutage <- cut(data1$age,breaks=c(30,40,50,60,70,Inf),labels=c("30-40","40-50","50-60","60-70","70+"))
#Age distribution by gender across age groups
ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(male)))+theme_bw()+scale_fill_discrete(name="Gender")+
labs(
x = "Age",
title = paste(
"Male/Female among Age groups"
))

# Keep only rows with a recorded education level (drops NA education).
educationFilter <- filter(data1,education %in% c("1","2","3","4"))
# Age decades for the filtered rows.
# Fix: top-bucket label corrected from the original "55to65" to "70+".
cutageedu <- cut(educationFilter$age,breaks=c(30,40,50,60,70,Inf),labels=c("30-40","40-50","50-60","60-70","70+"))
#Education among age groups
ggplot(educationFilter)+geom_bar(aes(x=cutageedu,fill=as.factor(education)))+theme_bw()+scale_fill_discrete(name="Education")+
labs(
x = "Age",
title = paste(
"Education among age groups"
))

#Gender distribution vs Coronary heart disease
# Stacked bars: CHD suspects (x) split by gender (fill).
ggplot(data1)+geom_bar(aes(x=TenYearCHD,fill=as.factor(male)))+scale_fill_discrete(name="Gender")+theme_bw()+labs(
x = "CHD Suspect",
#y = "Male Vs Female",
title = paste(
"Gender distribution among CHD Suspects"
))

#Age wise CHD suspects (uses the cutage decade buckets defined above)
ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(TenYearCHD)))+theme_bw()+scale_fill_discrete(name="CHD Suspect")+
labs(
x = "Age",
title = paste(
"Agewise CHD Suspects "
))

#Genderwise Smokers
p1 <- ggplot(data1)+geom_bar(aes(x=male,fill=as.factor(curSmoker)))+theme_bw()+scale_fill_discrete(name="Smokers")+scale_x_discrete(name="Gender")+labs(title = "Genderwise Smokers")
#Age wise Smokers
p2<-ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(curSmoker)))+theme_bw()+scale_fill_discrete(name="Smokers")+
labs(
x = "Age",
title = paste(
"Smokers among Age group "
))
# Stack the two smoker breakdowns into one figure.
grid.arrange(p1,p2)

#Diabetes among age group
p3<-ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(diabetes)))+theme_bw()+scale_fill_discrete(name="Diabetes")+
labs(
x = "Age",
title = paste(
"Diabetes among age group "
))
#Prevalent Stroke among age group
p4<-ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(prevStroke)))+theme_bw()+scale_fill_discrete(name="Stroke")+
labs(
x = "Age",
title = paste(
"Stroke among age group "
))
#Smokers by diabetes status (the original comment here said "Prevalent stroke
#among Smokers", but p5 plots diabetes on x with smoker fill)
p5<-ggplot(data1)+geom_bar(aes(x=diabetes,fill=as.factor(curSmoker)))+theme_bw()+scale_fill_discrete(name="Smokers")+
labs(
x = "Diabetes",
title = paste(
"Diabetes among Smokers"
))
#Smokers by prevalent-stroke status (the original comment here said "Diabetes
#among Smokers", but p6 plots prevStroke on x with smoker fill)
p6<-ggplot(data1)+geom_bar(aes(x=prevStroke,fill=as.factor(curSmoker)))+theme_bw()+scale_fill_discrete(name="Smokers")+labs(x = "Prevalent Stroke",title = paste("Stroke among Smokers"))
# 2x2 grid: diabetes/stroke by age (left), smokers by diabetes/stroke (right).
grid.arrange(p3,p5,p4,p6,ncol=2, nrow=2)

# Keep only rows with a recorded BPMeds value (drops NA).
bpmedfilter <- filter(data1,BPMeds %in% c("Yes","No"))
# Age decades for the filtered rows.
# Fix: top-bucket label corrected from the original "55to65" to "70+".
cutagebp <- cut(bpmedfilter$age,breaks=c(30,40,50,60,70,Inf),labels=c("30-40","40-50","50-60","60-70","70+"))
#BP Medication among age group
ggplot(bpmedfilter)+geom_bar(aes(x=cutagebp,fill=as.factor(BPMeds)))+theme_bw()+scale_fill_discrete(name="BP Medication")+
labs(
x = "Age",
title = paste(
"BP Medication among age group "
))

#Diabetes among age group (standalone duplicate of the p3 panel above)
ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(diabetes)))+theme_bw()+scale_fill_discrete(name="Diabetes")+
labs(
x = "Age",
title = paste(
"Diabetes among age group "
))

#Education among age group (unfiltered, so NA education shows as its own group)
ggplot(data1)+geom_bar(aes(x=cutage,fill=as.factor(education)))+theme_bw()+scale_fill_discrete(name="Education")+
labs(
x = "Age",
title = paste(
"Education among age group "
))

# Keep only rows with a recorded (integer-valued) total cholesterol reading.
cholFilter <- filter(data1,totChol %in% (1:800))
# Bucket total cholesterol into 100-unit bands.
# Fix: the original label vector did not match the breaks — it mislabelled the
# first (100,200] band as "0-100" and skipped the 400-500 band entirely.
cutChol <- cut(cholFilter$totChol,breaks=c(100,200,300,400,500,600,700),labels=c("100-200","200-300","300-400","400-500","500-600","600-700"))
#Cholesterol bands among CHD Suspects
ggplot(cholFilter)+geom_bar(aes(x=cutChol,fill=as.factor(TenYearCHD)))+theme_bw()+
scale_fill_discrete(name="CHD Suspects")+
labs(
#y = "Age",
x= "Tot Cholestoral",
title = paste("Cholestoral Vs CHD Suspects"))

#Glucose vs sysBP: scatter + per-gender linear fits + 2-D density contours/fill
ggplot(data1, aes(x=glucose, y=sysBP,color=male)) +geom_point(shape=18) + geom_smooth(method=lm,fullrange=TRUE,se=FALSE)+geom_density2d()+stat_density_2d(aes(fill = ..level..), geom="polygon")
## Warning: Removed 388 rows containing non-finite values (stat_smooth).
## Warning: Removed 388 rows containing non-finite values (stat_density2d).
## Warning: Removed 388 rows containing non-finite values (stat_density2d).
## Warning: Removed 388 rows containing missing values (geom_point).

#+geom_text(label=rownames(data))
# Glucose vs diaBP: scatter with per-gender linear fits
ggplot(data1, aes(x=glucose, y=diaBP,color=male)) +geom_point(shape=18) + geom_smooth(method=lm,fullrange=TRUE,se=FALSE)
## Warning: Removed 388 rows containing non-finite values (stat_smooth).
## Warning: Removed 388 rows containing missing values (geom_point).

#Age vs Tot Chol
# Age decades for the cholesterol-filtered rows (used again further below).
# Fix: top-bucket label corrected from the original "55to65" to "70+".
cutagechol <- cut(cholFilter$age,breaks=c(30,40,50,60,70,Inf),labels=c("30-40","40-50","50-60","60-70","70+"))
ggplot(cholFilter, aes(x=age, y=totChol,color=male)) +geom_point(shape=18) + geom_smooth(method=lm,fullrange=TRUE,se=FALSE)#+stat_ellipse(type = "norm")

#Heart rate vs total cholesterol: scatter, linear fit, normal-theory ellipse
ggplot(cholFilter, aes(x=heartRate, y=totChol)) +geom_point(shape=18, color="steelblue") + geom_smooth(method=lm,fullrange=TRUE,se=FALSE)+stat_ellipse(type = "norm")
## Warning: Removed 1 rows containing non-finite values (stat_smooth).
## Warning: Removed 1 rows containing non-finite values (stat_ellipse).
## Warning: Removed 1 rows containing missing values (geom_point).

#Glucose density by CHD suspect status (the original comment said
#"Systolic BP vs CHD Suspect" but the plot uses glucose)
# Fix: the original added scale_fill_manual() and then immediately replaced it
# with scale_fill_discrete(), triggering a "scale already present" warning.
# Only the surviving discrete scale is kept, so the rendered plot is unchanged.
ggplot(data1, aes(glucose, fill=TenYearCHD)) + geom_density(alpha=.5) +
theme(legend.position = "right")+scale_fill_discrete(name="CHD Suspects")
## Warning: Removed 388 rows containing non-finite values (stat_density).

#Total cholesterol density by CHD suspect status (the original comment said
#"Diastolic BP vs CHD suspect" but the plot uses totChol)
# Fix: dropped the scale_fill_manual() that was immediately overridden by
# scale_fill_discrete() — it only produced a replacement warning.
ggplot(data1, aes(totChol, fill=TenYearCHD)) + geom_density(alpha=.5) +
theme(legend.position = "right")+scale_fill_discrete(name="CHD Suspects")
## Warning: Removed 50 rows containing non-finite values (stat_density).

#Cholesterol band counts within each age decade (the original comment said
#"Cholestoral among CHD Suspects", but the fill here is the cholesterol band)
ggplot(cholFilter)+geom_bar(aes(x=cutagechol,fill=as.factor(cutChol)))+theme_bw()+
scale_fill_discrete(name="Cholestoral")+
labs(
#y = "Age",
x= "Age",
title = paste("Age Vs Cholestoral"))

#Area plot for Total Cholestoral against Gender
# Binned area chart of totChol, one semi-transparent layer per gender.
ggplot(data1,aes(x=totChol))+geom_area(aes(fill = male),stat ="bin", alpha=0.6) +
theme_classic()+labs(title="Total Cholestoral among Gender")+scale_fill_discrete(name="Gender")
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Warning: Removed 50 rows containing non-finite values (stat_bin).

#Density plot of total cholesterol by gender, with a dashed overall-mean line
# Fix: mean(totChol) is NA because totChol has missing values, so the original
# vline was silently dropped (the "Removed 4240 rows (geom_vline)" warning);
# na.rm = TRUE makes the mean line actually render.
ggplot(data1,aes(x=totChol))+geom_density(aes(color = male)) +
geom_vline(data=data1, aes(xintercept=mean(totChol, na.rm = TRUE), color=male),
linetype="dashed") +
scale_color_manual(values=c("#E69F00", "steelblue"))+scale_fill_discrete(name="Gender")
## Warning: Removed 50 rows containing non-finite values (stat_density).

#Density plot of glucose by gender, with a dashed overall-mean line
# Fix: mean(glucose) is NA because glucose has missing values, so the original
# vline was silently dropped (the "Removed 4240 rows (geom_vline)" warning);
# na.rm = TRUE makes the mean line actually render.
ggplot(data1,aes(x=glucose))+geom_density(aes(color = male)) +
geom_vline(data=data1, aes(xintercept=mean(glucose, na.rm = TRUE), color=male),
linetype="dashed") +scale_color_manual(values=c("#E69F00", "steelblue"))+
scale_fill_discrete(name="Gender")
## Warning: Removed 388 rows containing non-finite values (stat_density).

# Dot plot of glucose, dots coloured by CHD suspect status.
ggplot(data1,aes(x=glucose))+geom_dotplot(aes(fill=TenYearCHD))+scale_fill_discrete(name="CHD Suspect")
## `stat_bindot()` using `bins = 30`. Pick better value with `binwidth`.
## Warning: Removed 388 rows containing non-finite values (stat_bindot).

# Dot plot of diastolic BP coloured by CHD suspect status.
# Fixes: `bindwidth` was a typo — the parameter is `binwidth`, so the value 15
# was being ignored (see the original "Ignoring unknown parameters" warning);
# and `TenYearCHD="Yes"` assigned a constant inside aes() instead of the
# intended equality test `TenYearCHD == "Yes"`.
ggplot(data1,aes(x=diaBP))+stat_bin(binwidth=15)+geom_dotplot(aes(fill=(TenYearCHD=="Yes")))+scale_fill_discrete(name="CHD Suspect")
## `stat_bindot()` using `bins = 30`. Pick better value with `binwidth`.

# Dot plot of glucose by education level.
# Fix: `bindwidth` typo -> `binwidth` (the misspelled parameter was ignored).
ggplot(educationFilter,aes(x=glucose))+stat_bin(binwidth=15)+geom_dotplot(aes(fill=education))
## Warning: Removed 380 rows containing non-finite values (stat_bin).
## `stat_bindot()` using `bins = 30`. Pick better value with `binwidth`.
## Warning: Removed 380 rows containing non-finite values (stat_bindot).

# Dot plot of total cholesterol by education level.
# Fix: `bindwidth` typo -> `binwidth` (the misspelled parameter was ignored).
ggplot(educationFilter,aes(x=totChol))+stat_bin(binwidth=15)+geom_dotplot(aes(fill=education))
## Warning: Removed 49 rows containing non-finite values (stat_bin).
## `stat_bindot()` using `bins = 30`. Pick better value with `binwidth`.
## Warning: Removed 49 rows containing non-finite values (stat_bindot).

Treating for outliers (5%-95%) capping and mice imputation of NAs
# pcap <- function(x){
# for (i in which(sapply(x, is.numeric))) {
# quantiles <- quantile( x[,i], c(.05, .95 ), na.rm =TRUE)
# x[,i] = ifelse(x[,i] < quantiles[1] , quantiles[1], x[,i])
# x[,i] = ifelse(x[,i] > quantiles[2] , quantiles[2], x[,i])}
# x}
# datacap <- data
# datacap$male <- as.factor(datacap$male)
# datacap$education <- as.factor(datacap$education)
# datacap$curSmoker <- as.factor(datacap$curSmoker)
# datacap$prevStroke <- as.factor(datacap$prevStroke)
# datacap$prevHyp <- as.factor(datacap$prevHyp)
# datacap$diabetes <- as.factor(datacap$diabetes)
# datacap$BPMeds <- as.factor(datacap$BPMeds)
# datacap$TenYearCHD <- as.factor(datacap$TenYearCHD)
#
#
# abcd <- pcap(datacap)
#
# quantile(abcd[,15], c(0.25,0.5,.95, .99, 1), na.rm = TRUE)
#
# #abcd$TenYearCHD <- as.factor(abcd$TenYearCHD)
#
# miceModab <- mice(abcd[,!names(abcd) %in% 'TenYearCHD'],seed = 500,maxit = 20,printFlag = FALSE)
# miceOutputab <- complete(miceModab)
# anyNA(miceOutputab)
#
# miceOutputab1 <- cbind(miceOutputab,data$TenYearCHD)
# miceOutputab1 <- dplyr::rename(miceOutputab1,"TenYearCHD"="data$TenYearCHD")
#
# miceOutputab1$TenYearCHD <- as.factor(miceOutputab1$TenYearCHD)
#
# dfCHDModel1 <- miceOutputab1
#
# densityplot(miceModab)
Using dlookr package for Outlier treatment + imputing NAs
# Fresh copy for the dlookr outlier/NA treatment; convert every categorical
# indicator column to a factor in a single pass.
data2 <- data
catCols <- c("male", "prevStroke", "prevHyp", "education",
             "BPMeds", "diabetes", "curSmoker", "TenYearCHD")
data2[catCols] <- lapply(data2[catCols], as.factor)
# Timestamp the start of the imputation pipeline.
starttime <- Sys.time()
######Glucose#########################################
# Cap glucose outliers with dlookr's "capping" method; NAs are left in place
# for the mice step further below.
glucose1 <- imputate_outlier(data2,glucose,method ="capping")
## Warning: Unquoting language objects with `!!!` is deprecated as of rlang 0.4.0.
## Please use `!!` instead.
##
## # Bad:
## dplyr::select(data, !!!enquo(x))
##
## # Good:
## dplyr::select(data, !!enquo(x)) # Unquote single quosure
## dplyr::select(data, !!!enquos(x)) # Splice list of quosures
##
## This warning is displayed once per session.
# Before/after statistics of the capping (transcript below).
summary(glucose1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 3852.0000000 3852.00000000
## na 388.0000000 388.00000000
## mean 81.9636552 79.84230270
## sd 23.9543348 12.48923977
## se_mean 0.3859588 0.20123006
## IQR 16.0000000 16.00000000
## skewness 6.2149483 0.55538718
## kurtosis 58.7037414 -0.04218054
## p00 40.0000000 47.00000000
## p01 55.0000000 57.00000000
## p05 62.0000000 62.00000000
## p10 65.0000000 65.00000000
## p20 70.0000000 70.00000000
## p25 71.0000000 71.00000000
## p30 73.0000000 73.00000000
## p40 75.0000000 75.00000000
## p50 78.0000000 78.00000000
## p60 81.0000000 81.00000000
## p70 85.0000000 85.00000000
## p75 87.0000000 87.00000000
## p80 89.0000000 89.00000000
## p90 98.0000000 98.00000000
## p95 108.4500000 108.20250000
## p99 174.9600000 108.45000000
## p100 394.0000000 111.00000000
# Visual before/after check of the capped glucose.
plot(glucose1)

# Replace the raw glucose column (position 15) with the capped version.
# NOTE(review): the negative index is positional — fragile if columns move.
data2 <- cbind(data2,glucose1)
data2$glucose1 <- as.numeric(data2$glucose1)
data2 <- data2[,c(-15)]
# Impute the remaining glucose NAs with mice.
# NOTE(review): unlike the later imputations, no seed is set here, so this
# step is not reproducible — confirm whether seed = 100 was intended.
glucose2 <- imputate_na(data2,glucose1,method = "mice",print_flag = FALSE)
summary(glucose2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Information of Imputation (before vs after)
## Original Imputation
## n 3852.00000000 4240.0000000
## na 388.00000000 0.0000000
## mean 79.84230270 79.8456792
## sd 12.48923977 12.0277904
## se_mean 0.20123006 0.1847153
## IQR 16.00000000 14.0000000
## skewness 0.55538718 0.5699037
## kurtosis -0.04218054 0.1409208
## p00 47.00000000 47.0000000
## p01 57.00000000 57.0000000
## p05 62.00000000 62.0000000
## p10 65.00000000 66.0000000
## p20 70.00000000 70.0000000
## p25 71.00000000 72.0000000
## p30 73.00000000 73.0000000
## p40 75.00000000 76.0000000
## p50 78.00000000 78.0000000
## p60 81.00000000 81.0000000
## p70 85.00000000 84.0000000
## p75 87.00000000 86.0000000
## p80 89.00000000 88.0000000
## p90 98.00000000 97.0000000
## p95 108.20250000 107.0000000
## p99 108.45000000 108.4500000
## p100 111.00000000 111.0000000
# Visual before/after check of the mice-imputed glucose.
plot(glucose2)

# Swap the imputed column in (drop position 16) and restore the name "glucose".
data2 <- cbind(data2,glucose2)
data2$glucose2 <- as.numeric(data2$glucose2)
data2 <- data2[,c(-16)]
data2<-dplyr::rename(data2,"glucose"="glucose2")
###################Education#############################################
# education is categorical: re-assert the factor type, then mice-impute its
# NAs with a fixed seed for reproducibility.
data2$education <- as.factor(data2$education)
education1 <- imputate_na(data2,education,method = "mice",seed = 100, print_flag = FALSE)
summary(education1)
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## original imputation original_percent imputation_percent
## 1 1720 1768 40.57 41.70
## 2 1253 1280 29.55 30.19
## 3 689 707 16.25 16.67
## 4 473 485 11.16 11.44
## <NA> 105 0 2.48 0.00
# Visual before/after check of the imputed education levels.
plot(education1)

# Swap the imputed column in (education was column 3) and restore its name.
data2 <- cbind(data2,education1)
data2$education1 <- as.factor(data2$education1)
data2 <- data2[,c(-3)]
data2<-dplyr::rename(data2,"education"="education1")
#######################BMI########################################
# Cap BMI outliers with dlookr's "capping" method; before/after stats below.
bmi1 <- imputate_outlier(data2,BMI,method ="capping")
summary(bmi1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4.221000e+03 4221.00000000
## na 1.900000e+01 19.00000000
## mean 2.580080e+01 25.65549159
## sd 4.079840e+00 3.65593642
## se_mean 6.279651e-02 0.05627182
## IQR 4.970000e+00 4.97000000
## skewness 9.821833e-01 0.27394434
## kurtosis 2.657310e+00 -0.31002603
## p00 1.554000e+01 15.96000000
## p01 1.816400e+01 18.18000000
## p05 2.006000e+01 20.06000000
## p10 2.108000e+01 21.08000000
## p20 2.253000e+01 22.53000000
## p25 2.307000e+01 23.07000000
## p30 2.356000e+01 23.56000000
## p40 2.447000e+01 24.47000000
## p50 2.540000e+01 25.40000000
## p60 2.635000e+01 26.35000000
## p70 2.742000e+01 27.42000000
## p75 2.804000e+01 28.04000000
## p80 2.869000e+01 28.69000000
## p90 3.077000e+01 30.77000000
## p95 3.278000e+01 32.78000000
## p99 3.895600e+01 34.39800000
## p100 5.680000e+01 35.45000000
# Visual before/after check of the capped BMI.
plot(bmi1)

# Replace the raw BMI column (position 12) with the capped version, then
# mice-impute the remaining NAs (seed fixed for reproducibility).
data2 <- cbind(data2,bmi1)
data2$bmi1 <- as.numeric(data2$bmi1)
data2 <- data2[,c(-12)]
bmi2 <- imputate_na(data2,bmi1,method = "mice",seed=100,print_flag = FALSE)
summary(bmi2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4221.00000000 4240.0000000
## na 19.00000000 0.0000000
## mean 25.65549159 25.6548396
## sd 3.65593642 3.6516846
## se_mean 0.05627182 0.0560803
## IQR 4.97000000 4.9700000
## skewness 0.27394434 0.2745537
## kurtosis -0.31002603 -0.3072472
## p00 15.96000000 15.9600000
## p01 18.18000000 18.1800000
## p05 20.06000000 20.0600000
## p10 21.08000000 21.0990000
## p20 22.53000000 22.5300000
## p25 23.07000000 23.0700000
## p30 23.56000000 23.5700000
## p40 24.47000000 24.4700000
## p50 25.40000000 25.3850000
## p60 26.35000000 26.3500000
## p70 27.42000000 27.4200000
## p75 28.04000000 28.0400000
## p80 28.69000000 28.6900000
## p90 30.77000000 30.7610000
## p95 32.78000000 32.7705000
## p99 34.39800000 34.3961000
## p100 35.45000000 35.4500000
# Visual before/after check of the imputed BMI.
plot(bmi2)

# Swap the imputed column in (drop position 16), restore the name "BMI",
# and confirm via boxplot that the extreme outliers are gone.
data2 <- cbind(data2,bmi2)
data2$bmi2 <- as.numeric(data2$bmi2)
data2 <- data2[,c(-16)]
data2<-dplyr::rename(data2,"BMI"="bmi2")
boxplot(data2$BMI)

#######################sysBP###########
# Cap sysBP outliers with dlookr's "capping" method; before/after stats below.
sysBP1 <- imputate_outlier(data2,sysBP,method ="capping")
summary(sysBP1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4240.0000000 4240.0000000
## na 0.0000000 0.0000000
## mean 132.3545991 131.6333726
## sd 22.0332996 19.9758159
## se_mean 0.3383737 0.3067762
## IQR 27.0000000 27.0000000
## skewness 1.1452850 0.6063585
## kurtosis 2.1566236 -0.2295674
## p00 83.5000000 83.5000000
## p01 97.0000000 97.0000000
## p05 104.0000000 104.0000000
## p10 108.9500000 108.9500000
## p20 114.0000000 114.0000000
## p25 117.0000000 117.0000000
## p30 119.5000000 119.5000000
## p40 124.0000000 124.0000000
## p50 128.0000000 128.0000000
## p60 133.0000000 133.0000000
## p70 140.0000000 140.0000000
## p75 144.0000000 144.0000000
## p80 148.0000000 148.0000000
## p90 162.0000000 162.0000000
## p95 175.0000000 175.0000000
## p99 200.0000000 180.0000000
## p100 295.0000000 184.5000000
# Visual before/after check of the capped sysBP.
plot(sysBP1)

# Replace the raw sysBP column (position 10) with the capped version.
data2 <- cbind(data2,sysBP1)
data2$sysBP1 <- as.numeric(data2$sysBP1)
data2 <- data2[,c(-10)]
# sysBP has no NAs, so this mice call is a no-op (see warning below).
sysBP2 <- imputate_na(data2,sysBP1,method = "mice",seed=100,print_flag = FALSE)
## Warning in imputate_na_impl(.data, vars, target, method, seed,
## print_flag, : There are no missing values in sysBP1.
summary(sysBP2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4240.0000000 4240.0000000
## na 0.0000000 0.0000000
## mean 131.6333726 131.6333726
## sd 19.9758159 19.9758159
## se_mean 0.3067762 0.3067762
## IQR 27.0000000 27.0000000
## skewness 0.6063585 0.6063585
## kurtosis -0.2295674 -0.2295674
## p00 83.5000000 83.5000000
## p01 97.0000000 97.0000000
## p05 104.0000000 104.0000000
## p10 108.9500000 108.9500000
## p20 114.0000000 114.0000000
## p25 117.0000000 117.0000000
## p30 119.5000000 119.5000000
## p40 124.0000000 124.0000000
## p50 128.0000000 128.0000000
## p60 133.0000000 133.0000000
## p70 140.0000000 140.0000000
## p75 144.0000000 144.0000000
## p80 148.0000000 148.0000000
## p90 162.0000000 162.0000000
## p95 175.0000000 175.0000000
## p99 180.0000000 180.0000000
## p100 184.5000000 184.5000000
# Visual check of sysBP after the (no-op) imputation step.
plot(sysBP2)

# Swap the column in (drop position 16), restore the name "sysBP",
# and confirm the capped distribution via boxplot.
data2 <- cbind(data2,sysBP2)
data2$sysBP2 <- as.numeric(data2$sysBP2)
data2 <- data2[,c(-16)]
data2<-dplyr::rename(data2,"sysBP"="sysBP2")
boxplot(data2$sysBP)

##################DiaBP#####################
# Cap diaBP outliers with dlookr's "capping" method; before/after stats below.
diaBP1 <- imputate_outlier(data2,diaBP,method ="capping")
summary(diaBP1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4240.0000000 4240.0000000
## na 0.0000000 0.0000000
## mean 82.8977594 82.6235377
## sd 11.9103945 11.0600511
## se_mean 0.1829124 0.1698534
## IQR 15.0000000 15.0000000
## skewness 0.7132502 0.3016647
## kurtosis 1.2753143 -0.2425547
## p00 48.0000000 53.0000000
## p01 60.0000000 60.0000000
## p05 66.0000000 66.0000000
## p10 69.0000000 69.0000000
## p20 73.0000000 73.0000000
## p25 75.0000000 75.0000000
## p30 76.0000000 76.0000000
## p40 80.0000000 80.0000000
## p50 82.0000000 82.0000000
## p60 85.0000000 85.0000000
## p70 87.5000000 87.5000000
## p75 90.0000000 90.0000000
## p80 92.0000000 92.0000000
## p90 98.0000000 98.0000000
## p95 104.5250000 104.5012500
## p99 118.0000000 109.0000000
## p100 142.5000000 112.5000000
# Visualize the capping result for diastolic BP.
plot(diaBP1)

# Append the capped vector as numeric, drop the original diaBP column
# (position 10), then run MICE NA-imputation (a no-op here: no NAs remain,
# as the warning below records).
data2 <- cbind(data2,diaBP1)
data2$diaBP1 <- as.numeric(data2$diaBP1)
data2 <- data2[,c(-10)]
diaBP2 <- imputate_na(data2,diaBP1,method = "mice",seed=100,print_flag = FALSE)
## Warning in imputate_na_impl(.data, vars, target, method, seed,
## print_flag, : There are no missing values in diaBP1.
summary(diaBP2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4240.0000000 4240.0000000
## na 0.0000000 0.0000000
## mean 82.6235377 82.6235377
## sd 11.0600511 11.0600511
## se_mean 0.1698534 0.1698534
## IQR 15.0000000 15.0000000
## skewness 0.3016647 0.3016647
## kurtosis -0.2425547 -0.2425547
## p00 53.0000000 53.0000000
## p01 60.0000000 60.0000000
## p05 66.0000000 66.0000000
## p10 69.0000000 69.0000000
## p20 73.0000000 73.0000000
## p25 75.0000000 75.0000000
## p30 76.0000000 76.0000000
## p40 80.0000000 80.0000000
## p50 82.0000000 82.0000000
## p60 85.0000000 85.0000000
## p70 87.5000000 87.5000000
## p75 90.0000000 90.0000000
## p80 92.0000000 92.0000000
## p90 98.0000000 98.0000000
## p95 104.5012500 104.5012500
## p99 109.0000000 109.0000000
## p100 112.5000000 112.5000000
# Visualize the (unchanged) MICE imputation result for diastolic BP.
plot(diaBP2)

# Append the imputed vector as numeric, drop the intermediate capped diaBP1
# (position 16), and restore the original column name.
data2 <- cbind(data2,diaBP2)
data2$diaBP2 <- as.numeric(data2$diaBP2)
data2 <- data2[,c(-16)]  # NOTE(review): position-based drop — fragile if column order changes
data2<-dplyr::rename(data2,"diaBP"="diaBP2")
boxplot(data2$diaBP)  # confirm extreme values are now capped

###################heartRate####################
# Cap heartRate outliers; the single NA (na = 1 below) is left for the MICE
# step that follows.
heartRate1 <- imputate_outlier(data2,heartRate,method ="capping")
summary(heartRate1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4239.0000000 4239.0000000
## na 1.0000000 1.0000000
## mean 75.8789809 75.6319887
## sd 12.0253480 11.3095146
## se_mean 0.1846996 0.1737050
## IQR 15.0000000 15.0000000
## skewness 0.6443718 0.2984974
## kurtosis 0.9073957 -0.3435792
## p00 44.0000000 46.0000000
## p01 52.0000000 52.0000000
## p05 60.0000000 60.0000000
## p10 60.0000000 60.0000000
## p20 65.0000000 65.0000000
## p25 68.0000000 68.0000000
## p30 70.0000000 70.0000000
## p40 72.0000000 72.0000000
## p50 75.0000000 75.0000000
## p60 77.0000000 77.0000000
## p70 80.0000000 80.0000000
## p75 83.0000000 83.0000000
## p80 85.0000000 85.0000000
## p90 92.0000000 92.0000000
## p95 98.0000000 98.0000000
## p99 110.0000000 100.0000000
## p100 143.0000000 105.0000000
# Visualize the capping result for heart rate.
plot(heartRate1)

# Append the capped vector as numeric, drop the original heartRate column
# (position 10), then impute the single missing value with MICE
# (seed fixed for reproducibility).
data2 <- cbind(data2,heartRate1)
data2$heartRate1 <- as.numeric(data2$heartRate1)
data2 <- data2[,c(-10)]
heartRate2 <- imputate_na(data2,heartRate1,method = "mice",seed=100,print_flag = FALSE)
summary(heartRate2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4239.0000000 4240.0000000
## na 1.0000000 0.0000000
## mean 75.6319887 75.6329245
## sd 11.3095146 11.3083447
## se_mean 0.1737050 0.1736665
## IQR 15.0000000 15.0000000
## skewness 0.2984974 0.2982814
## kurtosis -0.3435792 -0.3432020
## p00 46.0000000 46.0000000
## p01 52.0000000 52.0000000
## p05 60.0000000 60.0000000
## p10 60.0000000 60.0000000
## p20 65.0000000 65.0000000
## p25 68.0000000 68.0000000
## p30 70.0000000 70.0000000
## p40 72.0000000 72.0000000
## p50 75.0000000 75.0000000
## p60 77.0000000 77.0000000
## p70 80.0000000 80.0000000
## p75 83.0000000 83.0000000
## p80 85.0000000 85.0000000
## p90 92.0000000 92.0000000
## p95 98.0000000 98.0000000
## p99 100.0000000 100.0000000
## p100 105.0000000 105.0000000
# Visualize the MICE imputation result for heart rate.
plot(heartRate2)

# Append the imputed vector as numeric, drop the intermediate capped
# heartRate1 (position 16), and restore the original column name.
data2 <- cbind(data2,heartRate2)
data2$heartRate2 <- as.numeric(data2$heartRate2)
data2 <- data2[,c(-16)]  # NOTE(review): position-based drop — fragile if column order changes
data2<-dplyr::rename(data2,"heartRate"="heartRate2")
boxplot(data2$heartRate)  # confirm extreme values are now capped

#############################################################
###################cigsPerDay####################
# Cap cigsPerDay outliers; 29 NAs (na = 29 below) are left for the MICE step
# that follows.
cigsPerDay1 <- imputate_outlier(data2,cigsPerDay,method ="capping")
summary(cigsPerDay1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4211.0000000 4211.0000000
## na 29.0000000 29.0000000
## mean 9.0059368 8.9180717
## sd 11.9224618 11.6497045
## se_mean 0.1837271 0.1795239
## IQR 20.0000000 20.0000000
## skewness 1.2470524 1.1226870
## kurtosis 1.0194182 0.3337949
## p00 0.0000000 0.0000000
## p01 0.0000000 0.0000000
## p05 0.0000000 0.0000000
## p10 0.0000000 0.0000000
## p20 0.0000000 0.0000000
## p25 0.0000000 0.0000000
## p30 0.0000000 0.0000000
## p40 0.0000000 0.0000000
## p50 0.0000000 0.0000000
## p60 9.0000000 9.0000000
## p70 15.0000000 15.0000000
## p75 20.0000000 20.0000000
## p80 20.0000000 20.0000000
## p90 25.0000000 25.0000000
## p95 30.0000000 30.0000000
## p99 43.0000000 43.0000000
## p100 70.0000000 50.0000000
# Visualize the capping result for cigarettes per day.
plot(cigsPerDay1)

# Append the capped vector as numeric, drop the original cigsPerDay column
# (position 4), then impute the 29 missing values with MICE.
data2 <- cbind(data2,cigsPerDay1)
data2$cigsPerDay1 <- as.numeric(data2$cigsPerDay1)
data2 <- data2[,c(-4)]
cigsPerDay2 <- imputate_na(data2,cigsPerDay1,method = "mice",seed=100,print_flag = FALSE)
summary(cigsPerDay2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4211.0000000 4240.0000000
## na 29.0000000 0.0000000
## mean 8.9180717 8.9824528
## sd 11.6497045 11.6405857
## se_mean 0.1795239 0.1787689
## IQR 20.0000000 20.0000000
## skewness 1.1226870 1.1064888
## kurtosis 0.3337949 0.3025172
## p00 0.0000000 0.0000000
## p01 0.0000000 0.0000000
## p05 0.0000000 0.0000000
## p10 0.0000000 0.0000000
## p20 0.0000000 0.0000000
## p25 0.0000000 0.0000000
## p30 0.0000000 0.0000000
## p40 0.0000000 0.0000000
## p50 0.0000000 0.0000000
## p60 9.0000000 9.0000000
## p70 15.0000000 15.0000000
## p75 20.0000000 20.0000000
## p80 20.0000000 20.0000000
## p90 25.0000000 25.0000000
## p95 30.0000000 30.0000000
## p99 43.0000000 43.0000000
## p100 50.0000000 50.0000000
# Visualize the MICE imputation result for cigarettes per day.
plot(cigsPerDay2)

# Append the imputed vector as numeric, drop the intermediate capped
# cigsPerDay1 (position 16), and restore the original column name.
data2 <- cbind(data2,cigsPerDay2)
data2$cigsPerDay2 <- as.numeric(data2$cigsPerDay2)
data2 <- data2[,c(-16)]  # NOTE(review): position-based drop — fragile if column order changes
data2<-dplyr::rename(data2,"cigsPerDay"="cigsPerDay2")
boxplot(data2$cigsPerDay)

#############################################################
###################tot Cholesterol####################
# Cap total cholesterol outliers (p100 drops from 696 to 347 below); 50 NAs
# are left for the MICE step that follows.
totChol1 <- imputate_outlier(data2,totChol,method ="capping")
summary(totChol1)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## Impute outliers with capping
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4190.0000000 4190.0000000
## na 50.0000000 50.0000000
## mean 236.6995227 235.8004773
## sd 44.5912839 41.5255437
## se_mean 0.6888790 0.6415172
## IQR 57.0000000 57.0000000
## skewness 0.8718806 0.2357734
## kurtosis 4.1298894 -0.3682497
## p00 107.0000000 124.0000000
## p01 153.0000000 154.0000000
## p05 170.0000000 170.0000000
## p10 183.0000000 183.0000000
## p20 200.0000000 200.0000000
## p25 206.0000000 206.0000000
## p30 212.0000000 212.0000000
## p40 223.0000000 223.0000000
## p50 234.0000000 234.0000000
## p60 244.0000000 244.0000000
## p70 257.0000000 257.0000000
## p75 263.0000000 263.0000000
## p80 271.0000000 271.0000000
## p90 292.0000000 292.0000000
## p95 312.0000000 312.0000000
## p99 354.1100000 334.1100000
## p100 696.0000000 347.0000000
# Visualize the capping result for total cholesterol.
plot(totChol1)

# Append the capped vector as numeric, drop the original totChol column
# (position 8), then impute the 50 missing values with MICE.
data2 <- cbind(data2,totChol1)
data2$totChol1 <- as.numeric(data2$totChol1)
data2 <- data2[,c(-8)]
totChol2 <- imputate_na(data2,totChol1,method = "mice",seed=100,print_flag = FALSE)
summary(totChol2)
## Warning: `cols` is now required.
## Please use `cols = c(statistic)`
## * Impute missing values based on Multivariate Imputation by Chained Equations
## - method : mice
## - random seed : 100
##
## * Information of Imputation (before vs after)
## Original Imputation
## n 4190.0000000 4240.0000000
## na 50.0000000 0.0000000
## mean 235.8004773 235.7934434
## sd 41.5255437 41.3286665
## se_mean 0.6415172 0.6347000
## IQR 57.0000000 56.5500000
## skewness 0.2357734 0.2364781
## kurtosis -0.3682497 -0.3477068
## p00 124.0000000 124.0000000
## p01 154.0000000 154.0000000
## p05 170.0000000 170.0000000
## p10 183.0000000 184.0000000
## p20 200.0000000 200.0000000
## p25 206.0000000 206.0000000
## p30 212.0000000 212.0000000
## p40 223.0000000 223.0000000
## p50 234.0000000 234.0000000
## p60 244.0000000 244.0000000
## p70 257.0000000 256.3000000
## p75 263.0000000 262.5500000
## p80 271.0000000 271.0000000
## p90 292.0000000 292.0000000
## p95 312.0000000 312.0000000
## p99 334.1100000 334.0000000
## p100 347.0000000 347.0000000
# Visualize the MICE imputation result for total cholesterol.
plot(totChol2)

# Append the imputed vector as numeric, drop the intermediate capped
# totChol1 (position 16), and restore the original column name.
data2 <- cbind(data2,totChol2)
data2$totChol2 <- as.numeric(data2$totChol2)
data2 <- data2[,c(-16)]  # NOTE(review): position-based drop — fragile if column order changes
data2<-dplyr::rename(data2,"totChol"="totChol2")
boxplot(data2$totChol)

#############################################################
###################BPMeds#############################################
# BPMeds is binary, so treat it as a factor and fill its 53 missing values
# with the mode (most frequent level, 0 — see recorded output below).
data2$BPMeds <- as.factor(data2$BPMeds)
BPMeds1 <- imputate_na(data2,BPMeds,method = "mode",print_flag = FALSE)#seed = 100)
summary(BPMeds1)
## Impute missing values with mode
##
## * Information of Imputation (before vs after)
## original imputation original_percent imputation_percent
## 0 4063 4116 95.83 97.08
## 1 124 124 2.92 2.92
## <NA> 53 0 1.25 0.00
# Visualize the mode imputation result for BPMeds.
plot(BPMeds1)

# Replace the original BPMeds column (position 4) with the mode-imputed
# factor and restore the original column name.
data2 <- cbind(data2,BPMeds1)
data2$BPMeds1 <- as.factor(data2$BPMeds1)
data2 <- data2[,c(-4)]  # NOTE(review): position-based drop — fragile if column order changes
data2<-dplyr::rename(data2,"BPMeds"="BPMeds1")
###############################################################
#####Log transformation on dataset
# find_skewness(data2,index = FALSE, value = TRUE)
#
# #sysBP, cigsPerDay, glucose are > 0.3 (threshold), hence will be transformed
#
# sysBPLog <- transform(data2$sysBP, method = "log")
#
# summary(sysBPLog)
#
# plot(sysBPLog)
#
# cigsPerDayLog <- transform(data2$cigsPerDay, method = "log")
#
# summary(cigsPerDayLog)
#
# #We find -Inf values, hence do log+1
# cigsPerDayLog <- transform(data2$cigsPerDay, method = "log+1")
#
# summary(cigsPerDayLog)
#
# plot(cigsPerDayLog)
#
# glucoseLog <- transform(data2$glucose, method = "log")
#
# summary(glucoseLog)
#
# plot(glucoseLog)
#
# sysBPLog <- as.numeric(sysBPLog)
# cigsPerDayLog <- as.numeric(cigsPerDayLog)
# glucoseLog <- as.numeric(glucoseLog)
#
# endtime <- Sys.time()
#
# print(endtime-starttime)
#
# trandata2 <- cbind(data2,sysBPLog,glucoseLog,cigsPerDayLog)
#
# find_skewness(trandata2,index = FALSE, value = TRUE)
# # Since the skewness has reduced, we will now remove the original vars of glucose, cigsperday, sysBP
# trandata2 <- trandata2[,c(-8,-11,-14)]
# Move the target variable TenYearCHD (position 7) to the last column.
data2 <- data2[,c(1:6,8:16,7)]
# Persist the cleaned dataset. write.csv() always writes a header row and
# does not accept a 'col.names' argument (the original call's col.names=TRUE
# was ignored with a warning, as the knitted output recorded), so it is
# omitted here — the written file is identical.
write.csv(data2,file="C:/BigData/BABI/Capstone/data2.csv")
# Section 6: Binning ----
# Bin age into decade-wide groups. Note cut() uses (30,40], ... intervals, so
# ages of exactly 30 (and anything outside (30,70]) would become NA.
binage <- cut(data2$age,breaks=c(30,40,50,60,70),labels=c("30t40","40t50","50t60","60t70"))
summary(binage)
## 30t40 40t50 50t60 60t70
## 748 1609 1304 579
# Visualize the age-bin frequencies.
plot(binage)

# Start a new working copy of the data with the age bins attached.
newdata2 <- cbind(data2,binage)
# newdata2 <- newdata2[,-2]
#Ranges as defined by medical terms <18 - Underweight, 18-24.9 - Normal, 24.9-29.9-Overweight,>30 Obese)
binBMI <- cut(data2$BMI,breaks=c(0,18.5,24.9,29.9,Inf),labels=c("Underweight","Normal","Overweight","Obese"))
summary(binBMI)
## Underweight Normal Overweight Obese
## 56 1844 1785 555
# Visualize the BMI-bin frequencies.
plot(binBMI)

# Attach the BMI bins, make the target a factor for classification, and move
# TenYearCHD (position 16) to the last column.
newdata2 <- cbind(newdata2,binBMI)
newdata2$TenYearCHD <- as.factor(newdata2$TenYearCHD)
newdata2 <- newdata2[,c(1:15,17:18,16)]
#
# newdata2 <- newdata2[,-9]
# newdata2 <- newdata2[,c(1:5,7:16,6)]
#Binning for transformed dataset
# binageT <- cut(trandata2$age,breaks=c(30,40,50,60,70),labels=c("30t40","40t50","50t60","60t70"))
# summary(binageT)
# plot(binageT)
#
# trandata2 <- cbind(trandata2,binageT)
#
# trandata2 <- trandata2[,-2]
#
#
#
# binBMIT <- cut(trandata2$BMI,breaks=c(0,18.5,24.9,29.9,Inf),labels=c("Underweight","Normal","Overweight","Obese"))
# summary(binBMIT)
# plot(binBMIT)
#
# trandata2 <- cbind(trandata2,binBMIT)
#
# trandata2 <- trandata2[,-8]
# trandata2 <- trandata2[,c(1:5,7:16,6)]
#write.csv(newdata2,file="C:/BigData/BABI/Capstone/Coronory Heart Risk Study/newdata.csv",col.names = TRUE,row.names = TRUE)
#write.csv(trandata2,file="C:/BigData/BABI/Capstone/Coronory Heart Risk Study/trandata2.csv",col.names = TRUE,row.names = TRUE)
# One-hot encode the categorical predictors with fastDummies::dummy_cols().
# NOTE: the original code built a first dummy-encoded frame (which also
# included binage/binBMI) and immediately overwrote it with the call below;
# that dead first call has been removed — the resulting dmnewdata is identical.
# NOTE(review): "prevStroke" appears twice in select_columns. It is kept as-is
# because the hard-coded column indices below depend on the exact resulting
# column layout — deduplicate it only together with those index vectors.
dmnewdata<- dummy_cols(newdata2, select_columns = c("male","prevStroke","education","diabetes","prevHyp","prevStroke","BPMeds"))
#dmnewdata <- dmnewdata[,c(-1,-2,-3,-4,-5,-7,-13,-15,-14)]
#dmnewdata <- dmnewdata[,c(1:6,8:29,7)]
# Drop the original (un-dummied) categorical columns by position, then move
# the target TenYearCHD to the last column.
dmnewdata <- dmnewdata[,c(-1,-3,-4,-5,-6,-8,-15)]
dmnewdata <- dmnewdata[,c(1:8,10:23,9)]
#dmnewdata1 <- dummy_cols(newdata2,select_columns = c("education","binage","binBMI"))
#dmnewdata1 <- dmnewdata1[,c(-2,-7,-14,-15)]
#dmnewdata1 <- dmnewdata1[,c(1:11,13:24,12)]
# Section 11: Logistic Regression ----
# 70/30 stratified train/test split, then a full logistic regression on all
# predictors; predicted probabilities are thresholded at 0.5.
set.seed(123)
sample3 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
trainLr <- newdata2[sample3, ]
testLr <- newdata2[-sample3,]
logitmod3 <- glm(TenYearCHD ~ ., family = "binomial", data=trainLr)
pred3L <- predict(logitmod3, newdata = testLr, type = "response")
# NOTE(review): a 0.5 cutoff on this imbalanced target yields very low
# sensitivity (0.078 in the recorded output) — consider tuning the threshold.
y_pred_num3<- ifelse(pred3L > 0.5, 1, 0)
y_pred3 <- factor(y_pred_num3, levels=c(0, 1))
y_act3 <- factor(testLr$TenYearCHD)
caret::confusionMatrix(y_pred3,y_act3, positive="1", mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction 0 1
## 0 1066 178
## 1 12 15
##
## Accuracy : 0.8505
## 95% CI : (0.8297, 0.8697)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 0.426
##
## Kappa : 0.1029
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.07772
## Specificity : 0.98887
## Pos Pred Value : 0.55556
## Neg Pred Value : 0.85691
## Precision : 0.55556
## Recall : 0.07772
## F1 : 0.13636
## Prevalence : 0.15185
## Detection Rate : 0.01180
## Detection Prevalence : 0.02124
## Balanced Accuracy : 0.53329
##
## 'Positive' Class : 1
##
# NOTE(review): this feeds the hard 0/1 class labels (as.numeric of a factor
# gives its codes 1/2) into the ROC plot instead of the probabilities pred3L —
# plotROC(y_act3, pred3L) would give a meaningful curve. Confirm intent.
InformationValue::plotROC(y_act3,c(as.numeric(y_pred3)))

# Coefficient table and fit statistics for the logistic model.
summary(logitmod3)
##
## Call:
## glm(formula = TenYearCHD ~ ., family = "binomial", data = trainLr)
##
## Deviance Residuals:
## Min 1Q Median 3Q Max
## -1.3721 -0.6018 -0.4259 -0.2783 3.0322
##
## Coefficients:
## Estimate Std. Error z value Pr(>|z|)
## (Intercept) -8.305334 1.369839 -6.063 1.34e-09 ***
## male1 0.618149 0.125060 4.943 7.70e-07 ***
## age 0.065568 0.020890 3.139 0.001697 **
## curSmoker1 -0.213682 0.185169 -1.154 0.248507
## prevStroke1 0.622278 0.528791 1.177 0.239278
## prevHyp1 0.184967 0.162913 1.135 0.256218
## diabetes1 0.909291 0.265944 3.419 0.000628 ***
## glucose 0.001917 0.004660 0.411 0.680745
## education2 -0.162058 0.137341 -1.180 0.238014
## education3 -0.130386 0.167789 -0.777 0.437110
## education4 0.099000 0.174260 0.568 0.569954
## BMI -0.004933 0.037578 -0.131 0.895566
## sysBP 0.019610 0.004928 3.979 6.92e-05 ***
## diaBP -0.002644 0.007844 -0.337 0.736092
## heartRate -0.003863 0.005002 -0.772 0.439889
## cigsPerDay 0.028152 0.007528 3.740 0.000184 ***
## totChol 0.002390 0.001372 1.742 0.081480 .
## BPMeds1 0.265891 0.268707 0.990 0.322409
## binage40t50 0.125874 0.268105 0.469 0.638716
## binage50t60 0.077465 0.416135 0.186 0.852323
## binage60t70 -0.215688 0.568053 -0.380 0.704170
## binBMINormal 0.001740 0.591494 0.003 0.997652
## binBMIOverweight -0.177787 0.662442 -0.268 0.788406
## binBMIObese -0.154027 0.783439 -0.197 0.844137
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## (Dispersion parameter for binomial family taken to be 1)
##
## Null deviance: 2529.6 on 2968 degrees of freedom
## Residual deviance: 2240.2 on 2945 degrees of freedom
## AIC: 2288.2
##
## Number of Fisher Scoring iterations: 5
# Sequential (terms added first-to-last) chi-squared deviance tests.
anova(logitmod3,test="Chisq")
## Analysis of Deviance Table
##
## Model: binomial, link: logit
##
## Response: TenYearCHD
##
## Terms added sequentially (first to last)
##
##
## Df Deviance Resid. Df Resid. Dev Pr(>Chi)
## NULL 2968 2529.6
## male 1 30.715 2967 2498.8 2.989e-08 ***
## age 1 142.636 2966 2356.2 < 2.2e-16 ***
## curSmoker 1 5.789 2965 2350.4 0.0161246 *
## prevStroke 1 3.503 2964 2346.9 0.0612616 .
## prevHyp 1 40.236 2963 2306.7 2.251e-10 ***
## diabetes 1 14.688 2962 2292.0 0.0001269 ***
## glucose 1 0.037 2961 2292.0 0.8466871
## education 3 2.128 2958 2289.8 0.5463286
## BMI 1 0.259 2957 2289.6 0.6104785
## sysBP 1 25.675 2956 2263.9 4.039e-07 ***
## diaBP 1 0.092 2955 2263.8 0.7618733
## heartRate 1 0.218 2954 2263.6 0.6406291
## cigsPerDay 1 14.668 2953 2248.9 0.0001282 ***
## totChol 1 3.645 2952 2245.3 0.0562352 .
## BPMeds 1 0.972 2951 2244.3 0.3240916
## binage 3 3.027 2948 2241.3 0.3874297
## binBMI 3 1.120 2945 2240.2 0.7722576
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# Odds ratios: exponentiated log-odds coefficients.
exp(coef(logitmod3))
## (Intercept) male1 age curSmoker1
## 0.0002471947 1.8554907347 1.0677657533 0.8076051233
## prevStroke1 prevHyp1 diabetes1 glucose
## 1.8631683216 1.2031791522 2.4825613748 1.0019193293
## education2 education3 education4 BMI
## 0.8503917749 0.8777569046 1.1040667860 0.9950795185
## sysBP diaBP heartRate cigsPerDay
## 1.0198030501 0.9973598446 0.9961442698 1.0285518022
## totChol BPMeds1 binage40t50 binage50t60
## 1.0023927015 1.3045925944 1.1341390961 1.0805449423
## binage60t70 binBMINormal binBMIOverweight binBMIObese
## 0.8059867035 1.0017419924 0.8371206678 0.8572486011
# Coefficients on the probability scale: exp(b) / (1 + exp(b)).
exp(coef(logitmod3))/(1+exp(coef(logitmod3)))
## (Intercept) male1 age curSmoker1
## 0.0002471336 0.6497974979 0.5163862259 0.4467818291
## prevStroke1 prevHyp1 diabetes1 glucose
## 0.6507365660 0.5461104473 0.7128550247 0.5004793723
## education2 education3 education4 BMI
## 0.4595739056 0.4674497015 0.5247299151 0.4987668458
## sysBP diaBP heartRate cigsPerDay
## 0.5049022230 0.4993390887 0.4990342055 0.5070374841
## totChol BPMeds1 binage40t50 binage50t60
## 0.5005974606 0.5660838265 0.5314269806 0.5193566937
## binage60t70 binBMINormal binBMIOverweight binBMIObese
## 0.4462860673 0.5004351191 0.4556699418 0.4615691193
# McFadden and Nagelkerke pseudo R-squared for the logistic model.
# NOTE(review): PseudoR2 is not exported by any package loaded in the header
# shown above (it looks like DescTools::PseudoR2) — confirm its library() call
# exists elsewhere in the file.
PseudoR2(logitmod3,c("McFadden", "Nagel"))
## McFadden Nagelkerke
## 0.1144105 0.1619653
# Likelihood-ratio test of the full model against the intercept-only model.
# NOTE(review): lrtest() likely comes from lmtest (pulled in via car's
# dependencies) — confirm which package supplies it here.
lrtest(logitmod3)
## Likelihood ratio test
##
## Model 1: TenYearCHD ~ male + age + curSmoker + prevStroke + prevHyp +
## diabetes + glucose + education + BMI + sysBP + diaBP + heartRate +
## cigsPerDay + totChol + BPMeds + binage + binBMI
## Model 2: TenYearCHD ~ 1
## #Df LogLik Df Chisq Pr(>Chisq)
## 1 24 -1120.1
## 2 1 -1264.8 -23 289.41 < 2.2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
# Section 12: Naive Bayes (e1071) ----
# Naive Bayes (e1071) on a fresh 70/30 stratified split.
set.seed(1234)
samplen <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
trainNb <- newdata2[samplen, ]
testNb <- newdata2[-samplen,]
#nbmod<-naiveBayes(x=trainN[,1:23], y=trainN[,24])
# Columns 1:17 are the predictors; column 18 is the factor target TenYearCHD.
nbmod<-naiveBayes(x=trainNb[,1:17], y=trainNb[,18])
pred_nb<-predict(nbmod,newdata = testNb[,1:17])
table(pred_nb,testNb[,18])
##
## pred_nb 0 1
## 0 924 120
## 1 154 73
# Test-set performance of the Naive Bayes model.
caret::confusionMatrix(pred_nb,testNb$TenYearCHD,positive="1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction 0 1
## 0 924 120
## 1 154 73
##
## Accuracy : 0.7844
## 95% CI : (0.7608, 0.8067)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1.0000
##
## Kappa : 0.2195
##
## Mcnemar's Test P-Value : 0.0462
##
## Sensitivity : 0.37824
## Specificity : 0.85714
## Pos Pred Value : 0.32159
## Neg Pred Value : 0.88506
## Precision : 0.32159
## Recall : 0.37824
## F1 : 0.34762
## Prevalence : 0.15185
## Detection Rate : 0.05744
## Detection Prevalence : 0.17860
## Balanced Accuracy : 0.61769
##
## 'Positive' Class : 1
##
# NOTE(review): ROC computed from hard 0/1 class predictions rather than
# class probabilities — consider predict(..., type = "raw") probabilities.
InformationValue::plotROC(testNb$TenYearCHD,c(as.numeric(pred_nb)),Show.labels = T)
## Warning: Removed 101 rows containing missing values (geom_text).

# Training-set performance, to compare against the test-set numbers above
# and gauge overfitting.
pred_nb1<-predict(nbmod,newdata = trainNb[,1:17])
caret::confusionMatrix(pred_nb1,trainNb$TenYearCHD,positive="1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction 0 1
## 0 2143 265
## 1 375 186
##
## Accuracy : 0.7844
## 95% CI : (0.7692, 0.7991)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2395
##
## Mcnemar's Test P-Value : 1.643e-05
##
## Sensitivity : 0.41242
## Specificity : 0.85107
## Pos Pred Value : 0.33155
## Neg Pred Value : 0.88995
## Precision : 0.33155
## Recall : 0.41242
## F1 : 0.36759
## Prevalence : 0.15190
## Detection Rate : 0.06265
## Detection Prevalence : 0.18895
## Balanced Accuracy : 0.63174
##
## 'Positive' Class : 1
##
# Section 14: SVM Classification ----
# SVM with a polynomial kernel on the same seed-123 split as the logistic model.
set.seed(123)
sample3 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
trainSv <- newdata2[sample3, ]
testSv <- newdata2[-sample3,]
starttime <- Sys.time()
# NOTE(review): scale=FALSE leaves features on raw magnitudes, which a
# polynomial kernel is sensitive to; gamma/coef0 are fixed rather than tuned
# (see the commented tune.svm call below).
svmModel3 <- e1071::svm(TenYearCHD~.,data = trainSv,kernel="polynomial",scale=FALSE,gamma=0.1,coef0=1)
predsvm <- predict(svmModel3,newdata = testSv)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 4.534689 mins
caret::confusionMatrix(predsvm,testSv$TenYearCHD,positive="1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction 0 1
## 0 1023 177
## 1 55 16
##
## Accuracy : 0.8175
## 95% CI : (0.7951, 0.8383)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 0.9987
##
## Kappa : 0.0431
##
## Mcnemar's Test P-Value : 1.957e-15
##
## Sensitivity : 0.08290
## Specificity : 0.94898
## Pos Pred Value : 0.22535
## Neg Pred Value : 0.85250
## Precision : 0.22535
## Recall : 0.08290
## F1 : 0.12121
## Prevalence : 0.15185
## Detection Rate : 0.01259
## Detection Prevalence : 0.05586
## Balanced Accuracy : 0.51594
##
## 'Positive' Class : 1
##
####Tuning#####
#tuned_param <- tune.svm(TenYearCHD~.,data=train2S,gamma = 10^(-5:-1), cost = 10^(-3:1))
# Section 15: xgbTree from caret package ----
# Gradient-boosted trees (xgbTree) via caret on the dummy-encoded data.
set.seed(101)
#dmnewdata2 <- dummy_columns(newdata2,select_columns = c("education","BPMeds","prevStroke","prevHyp","diabetes","binageD","binBMID"))
#dmnewdata2 <- dmnewdata2[,c(-3,-6,-7,-8,-9,-17,-18)]
boostdata1 <- dmnewdata
sample3 <- createDataPartition(boostdata1$TenYearCHD,p=0.7,list = FALSE)
trainXG <- boostdata1[sample3, ]
testXG <- boostdata1[-sample3,]
# Class levels must be syntactically valid R names ("X0"/"X1") when
# classProbs = TRUE in trainControl.
levels(trainXG$TenYearCHD) <- make.names(levels(trainXG$TenYearCHD))
levels(testXG$TenYearCHD) <- make.names(levels(testXG$TenYearCHD))
trnoutput_vector <- trainXG[,"TenYearCHD"]
tesoutput_vector <- testXG[,"TenYearCHD"]
# 10-fold CV repeated 3 times; SMOTE resampling inside each fold to counter
# the class imbalance; twoClassSummary makes ROC the tuning metric.
xgb_trcontrol <- trainControl(
method = "repeatedcv",
number = 10,
repeats = 3,
#allowParallel = TRUE,
verboseIter = FALSE,
#returnData = FALSE,
summaryFunction = twoClassSummary,
classProbs = TRUE,
savePredictions=TRUE,
sampling = "smote"
)
# xgbGrid <- expand.grid(nrounds = c(25,50,75),
# max_depth = 4:7,
# colsample_bytree = c(0.3,0.4,0.5),
# eta = c(0.05,0.1,0.3),
# gamma=0,
# min_child_weight = c(2.0,2.25),
# subsample = 1
# )
# Single fixed hyper-parameter combination; the wider search grid above is
# kept commented out for reference.
xgbGrid <- expand.grid(nrounds = 50,
max_depth = 4,
colsample_bytree = 0.3,
eta = 0.05,
gamma=0,
min_child_weight = 2,
subsample = 1
)
#set.seed(0)
#reset.seed()
#numberofcores = detectCores() # review what number of cores does for your environment
#cl <- makeCluster(numberofcores, type = "SOCK")
# Register cluster so that caret will know to train in parallel.
#registerDoSNOW(cl)
starttime <- Sys.time()
xgb_model <- caret::train(TenYearCHD~.,data=trainXG,trControl = xgb_trcontrol,tuneGrid = xgbGrid,method = "xgbTree",preProcess = c("center","scale"))
## Warning in train.default(x, y, weights = w, ...): The metric "Accuracy" was
## not in the result set. ROC will be used instead.
#stopCluster(cl)
# Predict on the hold-out set and report timing plus test-set performance.
predicted <- predict(xgb_model, testXG)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 25.66165 secs
caret::confusionMatrix(predicted,tesoutput_vector, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 923 114
## X1 155 79
##
## Accuracy : 0.7884
## 95% CI : (0.7649, 0.8105)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1.00000
##
## Kappa : 0.2442
##
## Mcnemar's Test P-Value : 0.01473
##
## Sensitivity : 0.40933
## Specificity : 0.85622
## Pos Pred Value : 0.33761
## Neg Pred Value : 0.89007
## Precision : 0.33761
## Recall : 0.40933
## F1 : 0.37002
## Prevalence : 0.15185
## Detection Rate : 0.06216
## Detection Prevalence : 0.18411
## Balanced Accuracy : 0.63277
##
## 'Positive' Class : X1
##
#xgb_model$finalModel
#xgb.plot.tree(model = xgb_model)
# Native xgboost feature importance for the fitted booster.
# NOTE(review): colnames() of a plain character vector is NULL, so this
# passes feature_names = NULL and xgb.importance presumably falls back to the
# model's own feature names; the intent was probably
# feature_names = xgb_model$finalModel$feature_names. Confirm.
importanceC <- xgb.importance(feature_names = colnames(xgb_model$finalModel$feature_names), model = xgb_model$finalModel)
xgb.ggplot.importance(importanceC)

# caret's model-based variable importance, for comparison.
caret::varImp(xgb_model,useModel=TRUE,scale=FALSE)
## xgbTree variable importance
##
## only 20 most important variables shown (out of 26)
##
## Overall
## age 0.171159
## sysBP 0.160456
## cigsPerDay 0.106260
## education_1 0.080496
## heartRate 0.063157
## prevHyp_1 0.052085
## binage50t60 0.046288
## totChol 0.041807
## BMI 0.041790
## male_1 0.036981
## glucose 0.034336
## male_0 0.030788
## binage60t70 0.026763
## binage40t50 0.026182
## diaBP 0.020069
## prevHyp_0 0.016404
## diabetes_1 0.013742
## education_3 0.009618
## diabetes_0 0.009027
## education_2 0.005769
# Plot caret's importance ranking.
plot(caret::varImp(xgb_model,useModel=TRUE,scale=FALSE))

#xgb.plot.multi.trees(feature_names = names(xgb_model$finalModel$feature_names),model = xgb_model$finalModel)
#xgb.plot.tree(feature_names = xgb_model$finalModel$feature_names, model = xgb_model$finalModel)
# Cross-validated ROC evaluation of the caret model.
# NOTE(review): evalm() comes from the MLeval package, which is not loaded in
# the header shown above — confirm its library() call exists elsewhere.
xgbres <- evalm(xgb_model)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.302663395613666
## Group 1 AUC-ROC = 0.69

# Training-set performance, to gauge overfitting against the test-set numbers.
predicted1 <- predict(xgb_model, trainXG)
caret::confusionMatrix(predicted1,trnoutput_vector, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 2143 239
## X1 375 212
##
## Accuracy : 0.7932
## 95% CI : (0.7782, 0.8076)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2858
##
## Mcnemar's Test P-Value : 5.089e-08
##
## Sensitivity : 0.4701
## Specificity : 0.8511
## Pos Pred Value : 0.3612
## Neg Pred Value : 0.8997
## Precision : 0.3612
## Recall : 0.4701
## F1 : 0.4085
## Prevalence : 0.1519
## Detection Rate : 0.0714
## Detection Prevalence : 0.1977
## Balanced Accuracy : 0.6606
##
## 'Positive' Class : X1
##
# Section 16: LogitBoost (caret) ----
# LogitBoost via caret: 10-fold CV repeated 5 times with SMOTE sampling,
# tuning over tuneLength = 10 candidate boosting iterations, optimizing ROC.
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
train2L <- newdata2[sample2, ]
test2L <- newdata2[-sample2,]
# "X0"/"X1" level names are required by classProbs = TRUE.
levels(train2L$TenYearCHD) <- make.names(levels(train2L$TenYearCHD))
levels(test2L$TenYearCHD) <- make.names(levels(test2L$TenYearCHD))
repeats <- 5
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE
)
starttime <- Sys.time()
lmmodel <- caret::train(TenYearCHD~., data = train2L, method = "LogitBoost",
#preProcess = c("center","scale"),
trControl = x,
metric = "ROC",
tuneLength = tunel)
# Summary of model
lgbmpredict <- predict(lmmodel,newdata = test2L)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 1.136121 mins
# Tuning profile: ROC vs number of boosting iterations.
plot(lmmodel)

plot(lmmodel, print.thres = 0.5, type="S")

# Filter-based (ROC curve) variable importance.
caret::varImp(lmmodel,scale=F)
## ROC curve variable importance
##
## Importance
## age 0.6893
## binage 0.6712
## sysBP 0.6456
## prevHyp 0.6066
## diaBP 0.5903
## totChol 0.5736
## male 0.5708
## glucose 0.5597
## BMI 0.5571
## binBMI 0.5519
## cigsPerDay 0.5360
## education 0.5350
## diabetes 0.5222
## BPMeds 0.5186
## curSmoker 0.5185
## heartRate 0.5154
## prevStroke 0.5038
# Plot the importance ranking.
plot(caret::varImp(lmmodel,scale=F))

# Test-set performance of the tuned LogitBoost model.
caret::confusionMatrix(lgbmpredict,test2L$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 888 133
## X1 190 60
##
## Accuracy : 0.7459
## 95% CI : (0.721, 0.7696)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1.000000
##
## Kappa : 0.1201
##
## Mcnemar's Test P-Value : 0.001834
##
## Sensitivity : 0.31088
## Specificity : 0.82375
## Pos Pred Value : 0.24000
## Neg Pred Value : 0.86974
## Precision : 0.24000
## Recall : 0.31088
## F1 : 0.27088
## Prevalence : 0.15185
## Detection Rate : 0.04721
## Detection Prevalence : 0.19670
## Balanced Accuracy : 0.56731
##
## 'Positive' Class : X1
##
# Training-set performance, to gauge overfitting against the test-set numbers.
lgbmpredict1 <- predict(lmmodel,newdata = train2L)
caret::confusionMatrix(lgbmpredict1,train2L$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 2129 294
## X1 389 157
##
## Accuracy : 0.77
## 95% CI : (0.7544, 0.785)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1.0000000
##
## Kappa : 0.1782
##
## Mcnemar's Test P-Value : 0.0003221
##
## Sensitivity : 0.34812
## Specificity : 0.84551
## Pos Pred Value : 0.28755
## Neg Pred Value : 0.87866
## Precision : 0.28755
## Recall : 0.34812
## F1 : 0.31494
## Prevalence : 0.15190
## Detection Rate : 0.05288
## Detection Prevalence : 0.18390
## Balanced Accuracy : 0.59681
##
## 'Positive' Class : X1
##
# Cross-validated ROC evaluation (evalm is presumably MLeval — confirm it is
# loaded elsewhere in the file).
lres <- evalm(lmmodel)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.256928826418743
## Group 1 AUC-ROC = 0.66

# Section 17: Naive Bayes (caret) ----
# Naive Bayes via caret's "naive_bayes" method: 10-fold CV repeated 3 times
# with SMOTE sampling, tuneLength = 10, optimizing ROC.
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
train2N <- newdata2[sample2, ]
test2N <- newdata2[-sample2,]
# "X0"/"X1" level names are required by classProbs = TRUE.
levels(train2N$TenYearCHD) <- make.names(levels(train2N$TenYearCHD))
levels(test2N$TenYearCHD) <- make.names(levels(test2N$TenYearCHD))
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE
)
starttime <- Sys.time()
nbcmodel <- caret::train(TenYearCHD~., data = train2N, method = "naive_bayes",
#
trControl = x,
metric = "ROC",
tuneLength = tunel)
# Summary of model
nbcpredict <- predict(nbcmodel,newdata = test2N)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 25.79849 secs
# Tuning profile plots.
plot(nbcmodel, print.thres = 0.5, type="S")

plot(nbcmodel)

# Variable importance and test-set performance.
imp <- caret::varImp(nbcmodel,useModel=TRUE,scale=FALSE)
plot(imp)

caret::confusionMatrix(nbcpredict,test2N$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 815 90
## X1 263 103
##
## Accuracy : 0.7223
## 95% CI : (0.6968, 0.7467)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2118
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.53368
## Specificity : 0.75603
## Pos Pred Value : 0.28142
## Neg Pred Value : 0.90055
## Precision : 0.28142
## Recall : 0.53368
## F1 : 0.36852
## Prevalence : 0.15185
## Detection Rate : 0.08104
## Detection Prevalence : 0.28796
## Balanced Accuracy : 0.64485
##
## 'Positive' Class : X1
##
# Cross-validated ROC evaluation (evalm is presumably MLeval — confirm it is
# loaded elsewhere in the file).
res <- evalm(nbcmodel)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.321926915564917
## Group 1 AUC-ROC = 0.7

# Training-set performance, to gauge overfitting against the test-set numbers.
nbovpredict <- predict(nbcmodel,train2N)
caret::confusionMatrix(nbovpredict,train2N$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 1928 222
## X1 590 229
##
## Accuracy : 0.7265
## 95% CI : (0.7101, 0.7425)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2048
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.50776
## Specificity : 0.76569
## Pos Pred Value : 0.27961
## Neg Pred Value : 0.89674
## Precision : 0.27961
## Recall : 0.50776
## F1 : 0.36063
## Prevalence : 0.15190
## Detection Rate : 0.07713
## Detection Prevalence : 0.27585
## Balanced Accuracy : 0.63672
##
## 'Positive' Class : X1
##
# Section 18: Random Forest (caret) ----
# Random forest via caret: 10-fold CV repeated 3 times, SMOTE sampling,
# random hyper-parameter search (search = "random"), optimizing ROC.
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
train2R <- newdata2[sample2, ]
test2R <- newdata2[-sample2,]
# "X0"/"X1" level names are required by classProbs = TRUE.
levels(train2R$TenYearCHD) <- make.names(levels(train2R$TenYearCHD))
levels(test2R$TenYearCHD) <- make.names(levels(test2R$TenYearCHD))
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE,
search = "random"
)
starttime <- Sys.time()
mtry <- sqrt(ncol(newdata2))
tunegrid <- expand.grid(.mtry = mtry)
# NOTE(review): caret::train's argument is spelled tuneGrid; the lowercase
# 'tunegrid=' below is not matched by train() and goes into '...', so the
# grid (and the mtry computed above) appears to be unused — mtry is tuned by
# the random search over tuneLength = 10 instead. ntree = 15 is also very
# small for a random forest. Fix deliberately: correcting either will change
# the recorded results below.
rfmodel <- caret::train(TenYearCHD~., data = train2R, method = "rf",
#
trControl = x,
metric = "ROC",
tuneLength = tunel,tunegrid=tunegrid,ntree=15)
# Summary of model
rfpredict <- predict(rfmodel,newdata = test2R)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 2.134187 mins
# Tuning profile plots.
plot(rfmodel, print.thres = 0.5, type="S")

plot(rfmodel)

# Model-based variable importance.
rfimp <- caret::varImp(rfmodel,useModel=TRUE,scale=FALSE)
plot(rfimp)

# Get the row names of the variable importance data
rownames(rfimp$importance)
## [1] "male1" "age" "curSmoker1"
## [4] "prevStroke1" "prevHyp1" "diabetes1"
## [7] "glucose" "education2" "education3"
## [10] "education4" "BMI" "sysBP"
## [13] "diaBP" "heartRate" "cigsPerDay"
## [16] "totChol" "BPMeds1" "binage40t50"
## [19] "binage50t60" "binage60t70" "binBMINormal"
## [22] "binBMIOverweight" "binBMIObese"
# Convert the variable importance data into a dataframe
importance <- data.frame(rownames(rfimp$importance), rfimp$importance$Overall)
# Relabel the data
names(importance)<-c('CHD', 'Importance')
# Order the data from greatest importance to least important
#importance <- transform(importance, CHD = reorder(CHD, Importance))
# Plot the data with ggplot; coord_flip makes the variable labels readable.
ggplot(data=importance, aes(x=CHD, y=Importance)) +
geom_bar(stat = 'identity',colour = "blue", fill = "white") + coord_flip()

#varImpPlot(rfmodel,n.var = min(10,nrow(rfmodel$importance)),scale = TRUE,main="Top 10 Variable of importance",sort=TRUE)
# plot(rfmodel$finalModel)
# legend("topright", c("OOB", "0", "1"), text.col=1:6, lty=1:3, col=1:3)
# title(main="Error Rates Random Forest for CHD")
#tree_num <- rfmodel$finalModel$forest$ndbigtree
caret::confusionMatrix(rfpredict,test2R$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 909 116
## X1 169 77
##
## Accuracy : 0.7758
## 95% CI : (0.7518, 0.7984)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1.000000
##
## Kappa : 0.2177
##
## Mcnemar's Test P-Value : 0.002069
##
## Sensitivity : 0.39896
## Specificity : 0.84323
## Pos Pred Value : 0.31301
## Neg Pred Value : 0.88683
## Precision : 0.31301
## Recall : 0.39896
## F1 : 0.35080
## Prevalence : 0.15185
## Detection Rate : 0.06058
## Detection Prevalence : 0.19355
## Balanced Accuracy : 0.62110
##
## 'Positive' Class : X1
##
# Training-set predictions and confusion matrix -- compared against the
# test-set matrix to gauge overfitting.
rfpredict1 <- predict(rfmodel,newdata = train2R)
caret::confusionMatrix(rfpredict1,train2R$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 2200 221
## X1 318 230
##
## Accuracy : 0.8185
## 95% CI : (0.8041, 0.8322)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.3526
##
## Mcnemar's Test P-Value : 3.549e-05
##
## Sensitivity : 0.50998
## Specificity : 0.87371
## Pos Pred Value : 0.41971
## Neg Pred Value : 0.90872
## Precision : 0.41971
## Recall : 0.50998
## F1 : 0.46046
## Prevalence : 0.15190
## Detection Rate : 0.07747
## Detection Prevalence : 0.18457
## Balanced Accuracy : 0.69184
##
## 'Positive' Class : X1
##
# tree_func <- function(final_model,
# tree_num) {
#
# # get tree by index
# tree <- randomForest::getTree(final_model,
# k = tree_num,
# labelVar = TRUE) %>%
# tibble::rownames_to_column() %>%
# # make leaf split points to NA, so the 0s won't get plotted
# mutate(`split point` = ifelse(is.na(prediction), `split point`, NA))
#
# # prepare data frame for graph
# graph_frame <- data.frame(from = rep(tree$rowname, 2),
# to = c(tree$`left daughter`, tree$`right daughter`))
#
# # convert to graph and delete the last node that we don't want to plot
# graph <- graph_from_data_frame(graph_frame) %>%
# delete_vertices("0")
#
# # set node labels
# V(graph)$node_label <- gsub("_", " ", as.character(tree$`split var`))
# V(graph)$leaf_label <- as.character(tree$prediction)
# V(graph)$split <- as.character(round(tree$`split point`, digits = 2))
#
# # plot
# plot <- ggraph(graph, 'dendogram') +
# theme_bw() +
# geom_edge_link() +
# geom_node_point() +
# geom_node_text(aes(label = node_label), na.rm = TRUE, repel = TRUE) +
# geom_node_label(aes(label = split), vjust = 2.5, na.rm = TRUE, fill = "white") +
# geom_node_label(aes(label = leaf_label, fill = leaf_label), na.rm = TRUE,
# repel = TRUE, colour = "white", fontface = "bold", show.legend = FALSE) +
# theme(panel.grid.minor = element_blank(),
# panel.grid.major = element_blank(),
# panel.background = element_blank(),
# plot.background = element_rect(fill = "white"),
# panel.border = element_blank(),
# axis.line = element_blank(),
# axis.text.x = element_blank(),
# axis.text.y = element_blank(),
# axis.ticks = element_blank(),
# axis.title.x = element_blank(),
# axis.title.y = element_blank(),
# plot.title = element_text(size = 18))
#
# print(plot)
# }
# tree_func(rfmodel$finalModel,tree_num)
rfres <- evalm(rfmodel)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.30398514289136
## Group 1 AUC-ROC = 0.7

CTREE(19)
# Conditional inference tree (caret method "ctree").
# Same seed and 70/30 stratified split as the random-forest section.
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
train2CT <- newdata2[sample2, ]
test2CT <- newdata2[-sample2,]
# Recode outcome levels to valid names (X0/X1), required by classProbs.
levels(train2CT$TenYearCHD) <- make.names(levels(train2CT$TenYearCHD))
levels(test2CT$TenYearCHD) <- make.names(levels(test2CT$TenYearCHD))
# 10-fold CV repeated 3 times, SMOTE inside each fold; 10 tuning candidates.
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE
)
starttime <- Sys.time()
#mtry <- sqrt(ncol(newdata2))
#tgrid <- expand.grid(.mtry = mtry)
# Fit the tree, selecting the candidate with the best CV ROC.
ctrmodel <- caret::train(TenYearCHD~., data = train2CT, method = "ctree",
#
trControl = x,
metric = "ROC",
tuneLength = tunel)
#tunegrid=tgrid,
#ntree=5)
# Predict on the held-out test set, then report wall-clock training time.
ctpredict <- predict(ctrmodel,newdata = test2CT)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 2.714082 mins
# Tuning profile plot (extra arguments forwarded via ...).
plot(ctrmodel, print.thres = 0.5, type="S")

# Variable importance for the tuned tree (raw scores, not rescaled).
imp <- caret::varImp(ctrmodel,useModel=TRUE,scale=FALSE)
plot(imp)

# Plot the final fitted tree itself.
plot(ctrmodel$finalModel)

#plot(as.simpleparty(ctrmodel$finalModel))
# Test-set confusion matrix; X1 is the positive class.
caret::confusionMatrix(ctpredict,test2CT$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 811 95
## X1 267 98
##
## Accuracy : 0.7152
## 95% CI : (0.6895, 0.7399)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.1904
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.5078
## Specificity : 0.7523
## Pos Pred Value : 0.2685
## Neg Pred Value : 0.8951
## Precision : 0.2685
## Recall : 0.5078
## F1 : 0.3513
## Prevalence : 0.1518
## Detection Rate : 0.0771
## Detection Prevalence : 0.2872
## Balanced Accuracy : 0.6300
##
## 'Positive' Class : X1
##
# Training-set confusion matrix, for comparison with the test-set results
# (overfitting check).
ctpredict1 <- predict(ctrmodel,newdata = train2CT)
caret::confusionMatrix(ctpredict1,train2CT$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 1945 188
## X1 573 263
##
## Accuracy : 0.7437
## 95% CI : (0.7276, 0.7593)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2633
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.58315
## Specificity : 0.77244
## Pos Pred Value : 0.31459
## Neg Pred Value : 0.91186
## Precision : 0.31459
## Recall : 0.58315
## F1 : 0.40870
## Prevalence : 0.15190
## Detection Rate : 0.08858
## Detection Prevalence : 0.28158
## Balanced Accuracy : 0.67779
##
## 'Positive' Class : X1
##
ctres <- evalm(ctrmodel)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.324576574164904
## Group 1 AUC-ROC = 0.7

ctres$roc

C45(20) — note: the chunk below fits caret method "rpart2" (a CART tree tuned on max depth), not an actual C4.5/J48 learner
# Decision tree via caret method "rpart2" (CART tuned on max tree depth).
# NOTE(review): the section is headed "C45" but rpart2 is not C4.5/J48 --
# confirm which learner was intended.
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
train2C5 <- newdata2[sample2, ]
test2C5 <- newdata2[-sample2,]
# Recode outcome levels to valid names (X0/X1), required by classProbs.
levels(train2C5$TenYearCHD) <- make.names(levels(train2C5$TenYearCHD))
levels(test2C5$TenYearCHD) <- make.names(levels(test2C5$TenYearCHD))
# 10-fold CV repeated 3 times, SMOTE inside each fold; 10 tuning candidates.
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE
)
starttime <- Sys.time()
#mtry <- sqrt(ncol(newdata2))
#tgrid <- expand.grid(maxdepth = 25)
# Fit the tree, selecting max depth by best CV ROC.
c5model <- caret::train(TenYearCHD~., data = train2C5, method = "rpart2",
#
trControl = x,
metric = "ROC",
tuneLength = tunel)
## note: only 4 possible values of the max tree depth from the initial fit.
## Truncating the grid to 4 .
#tunegrid=tgrid)
#ntree=5)
# Predict on the held-out test set, then report wall-clock training time.
c5predict <- predict(c5model,newdata = test2C5)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 15.81627 secs
#plot(c5model, print.thres = 0.5, type="S")
# Variable importance for the tuned tree (raw scores, not rescaled).
imp <- caret::varImp(c5model,useModel=TRUE,scale=FALSE)
plot(imp)

# Pretty rpart tree plot. NOTE(review): fancyRpartPlot is from the rattle
# package, which is not loaded in the visible setup chunk -- confirm.
fancyRpartPlot(c5model$finalModel,palettes=c("Blues","Oranges"))

# Test-set confusion matrix; X1 is the positive class.
caret::confusionMatrix(c5predict,test2C5$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 830 103
## X1 248 90
##
## Accuracy : 0.7238
## 95% CI : (0.6984, 0.7483)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.1806
##
## Mcnemar's Test P-Value : 1.516e-14
##
## Sensitivity : 0.46632
## Specificity : 0.76994
## Pos Pred Value : 0.26627
## Neg Pred Value : 0.88960
## Precision : 0.26627
## Recall : 0.46632
## F1 : 0.33898
## Prevalence : 0.15185
## Detection Rate : 0.07081
## Detection Prevalence : 0.26593
## Balanced Accuracy : 0.61813
##
## 'Positive' Class : X1
##
# Training-set confusion matrix, for comparison with the test-set results
# (overfitting check).
c5predict1 <- predict(c5model,newdata = train2C5)
caret::confusionMatrix(c5predict1,train2C5$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 1963 195
## X1 555 256
##
## Accuracy : 0.7474
## 95% CI : (0.7314, 0.7629)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2615
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.56763
## Specificity : 0.77959
## Pos Pred Value : 0.31566
## Neg Pred Value : 0.90964
## Precision : 0.31566
## Recall : 0.56763
## F1 : 0.40571
## Prevalence : 0.15190
## Detection Rate : 0.08622
## Detection Prevalence : 0.27316
## Balanced Accuracy : 0.67361
##
## 'Positive' Class : X1
##
c45res <- evalm(c5model)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.334238273785727
## Group 1 AUC-ROC = 0.7

KNN(21)
# k-nearest neighbours via caret.
# 70/30 stratified split; note the different seed (101) from the tree
# models, so this partition differs from theirs.
set.seed(101)
sample3 <- createDataPartition(newdata2$TenYearCHD,p=0.7,list = FALSE)
trainKS <- newdata2[sample3, ]
testKS <- newdata2[-sample3,]
# Setting levels for both training and validation data (valid names X0/X1
# are required when classProbs = TRUE)
levels(trainKS$TenYearCHD) <- make.names(levels(trainKS$TenYearCHD))
levels(testKS$TenYearCHD) <- make.names(levels(testKS$TenYearCHD))
# 10-fold CV repeated 3 times, SMOTE inside each fold; 10 values of k.
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(method = "repeatedcv",
number = numbers,
repeats = repeats,
classProbs = TRUE,
summaryFunction = twoClassSummary,
sampling = "smote",
verboseIter = FALSE,
savePredictions = TRUE
)
starttime <- Sys.time()
# Center/scale preprocessing matters for kNN's distance computations.
modelK2 <- caret::train(TenYearCHD~., data = trainKS, method = "knn",
preProcess = c("center","scale"),
trControl = x,
metric = "ROC",
tuneLength = tunel)
# Predict on the held-out test set, then report wall-clock training time.
knnpredict <- predict(modelK2,newdata = testKS)
endtime <- Sys.time()
print(endtime-starttime)
## Time difference of 2.670699 mins
#plot(modelK2)
# Tuning profile plot (extra arguments forwarded via ...).
plot(modelK2, print.thres = 0.5, type="S")

# Variable importance for the kNN model (raw scores, not rescaled).
knnImp <- caret::varImp(modelK2,useModel=TRUE,scale=FALSE)
plot(knnImp)

# Test-set confusion matrix; X1 is the positive class.
caret::confusionMatrix(knnpredict,testKS$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 744 83
## X1 334 110
##
## Accuracy : 0.6719
## 95% CI : (0.6453, 0.6977)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.1696
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.56995
## Specificity : 0.69017
## Pos Pred Value : 0.24775
## Neg Pred Value : 0.89964
## Precision : 0.24775
## Recall : 0.56995
## F1 : 0.34537
## Prevalence : 0.15185
## Detection Rate : 0.08655
## Detection Prevalence : 0.34933
## Balanced Accuracy : 0.63006
##
## 'Positive' Class : X1
##
# Training-set confusion matrix, for comparison with the test-set results
# (overfitting check).
knnpredict1 <- predict(modelK2,trainKS)
caret::confusionMatrix(knnpredict1,trainKS$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 1772 147
## X1 746 304
##
## Accuracy : 0.6992
## 95% CI : (0.6824, 0.7157)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2445
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.6741
## Specificity : 0.7037
## Pos Pred Value : 0.2895
## Neg Pred Value : 0.9234
## Precision : 0.2895
## Recall : 0.6741
## F1 : 0.4051
## Prevalence : 0.1519
## Detection Rate : 0.1024
## Detection Prevalence : 0.3537
## Balanced Accuracy : 0.6889
##
## 'Positive' Class : X1
##
knres <- evalm(modelK2)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.234290932338163
## Group 1 AUC-ROC = 0.65

GBM(22)
# Stochastic Gradient Boosting (gbm) via caret ----------------------------
# 70/30 stratified split; same seed (101) as the kNN section.
set.seed(101)
sample2 <- createDataPartition(newdata2$TenYearCHD, p = 0.7, list = FALSE)
train2GB <- newdata2[sample2, ]
test2GB <- newdata2[-sample2, ]
# Setting levels for both training and validation data (valid names X0/X1
# are required when classProbs = TRUE).
levels(train2GB$TenYearCHD) <- make.names(levels(train2GB$TenYearCHD))
levels(test2GB$TenYearCHD) <- make.names(levels(test2GB$TenYearCHD))
numbers <- 10
tunel <- 10
# Plain 10-fold CV (no repeats here), SMOTE inside each fold.
# FIXES, for consistency with the other model sections:
#  * verboseIter = FALSE -- TRUE flooded the rendered report with
#    per-fold progress lines;
#  * savePredictions = TRUE -- restored (it was commented out) so the CV
#    predictions are kept; MLeval's evalm, used on every other model,
#    requires them.
x <- trainControl(method = "cv",
                  number = numbers,
                  classProbs = TRUE,
                  summaryFunction = twoClassSummary,
                  sampling = "smote",
                  verboseIter = FALSE,
                  savePredictions = TRUE
)
starttime <- Sys.time()
# verbose = FALSE is forwarded through train's `...` to gbm() and
# silences its per-iteration deviance trace ("Iter TrainDeviance ...").
gbmodel <- caret::train(TenYearCHD~., data = train2GB, method = "gbm",
                        preProcess = c("center","scale"),
                        trControl = x,
                        metric = "ROC",
                        tuneLength = tunel,
                        verbose = FALSE)
## + Fold01: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3495 nan 0.1000 0.0078
## 2 1.3331 nan 0.1000 0.0064
## 3 1.3216 nan 0.1000 0.0051
## 4 1.3110 nan 0.1000 0.0049
## 5 1.3001 nan 0.1000 0.0046
## 6 1.2915 nan 0.1000 0.0039
## 7 1.2842 nan 0.1000 0.0035
## 8 1.2777 nan 0.1000 0.0024
## 9 1.2693 nan 0.1000 0.0033
## 10 1.2637 nan 0.1000 0.0026
## 20 1.2212 nan 0.1000 0.0011
## 40 1.1761 nan 0.1000 0.0003
## 60 1.1491 nan 0.1000 0.0006
## 80 1.1289 nan 0.1000 0.0001
## 100 1.1118 nan 0.1000 -0.0003
## 120 1.0976 nan 0.1000 0.0000
## 140 1.0855 nan 0.1000 -0.0002
## 160 1.0754 nan 0.1000 -0.0002
## 180 1.0662 nan 0.1000 -0.0000
## 200 1.0577 nan 0.1000 -0.0003
## 220 1.0504 nan 0.1000 -0.0002
## 240 1.0424 nan 0.1000 -0.0001
## 260 1.0352 nan 0.1000 -0.0001
## 280 1.0283 nan 0.1000 -0.0004
## 300 1.0224 nan 0.1000 0.0001
## 320 1.0161 nan 0.1000 -0.0001
## 340 1.0095 nan 0.1000 -0.0000
## 360 1.0027 nan 0.1000 -0.0001
## 380 0.9981 nan 0.1000 -0.0001
## 400 0.9928 nan 0.1000 -0.0003
## 420 0.9874 nan 0.1000 -0.0002
## 440 0.9836 nan 0.1000 -0.0002
## 460 0.9786 nan 0.1000 -0.0002
## 480 0.9733 nan 0.1000 -0.0000
## 500 0.9684 nan 0.1000 0.0000
##
## - Fold01: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3441 nan 0.1000 0.0105
## 2 1.3233 nan 0.1000 0.0093
## 3 1.3075 nan 0.1000 0.0073
## 4 1.2917 nan 0.1000 0.0064
## 5 1.2798 nan 0.1000 0.0056
## 6 1.2683 nan 0.1000 0.0048
## 7 1.2595 nan 0.1000 0.0037
## 8 1.2503 nan 0.1000 0.0030
## 9 1.2420 nan 0.1000 0.0041
## 10 1.2344 nan 0.1000 0.0033
## 20 1.1815 nan 0.1000 0.0005
## 40 1.1147 nan 0.1000 0.0003
## 60 1.0626 nan 0.1000 0.0011
## 80 1.0235 nan 0.1000 0.0001
## 100 0.9968 nan 0.1000 0.0002
## 120 0.9653 nan 0.1000 -0.0006
## 140 0.9410 nan 0.1000 0.0001
## 160 0.9224 nan 0.1000 0.0005
## 180 0.9003 nan 0.1000 0.0002
## 200 0.8837 nan 0.1000 -0.0004
## 220 0.8695 nan 0.1000 -0.0001
## 240 0.8545 nan 0.1000 -0.0003
## 260 0.8381 nan 0.1000 -0.0002
## 280 0.8235 nan 0.1000 -0.0001
## 300 0.8100 nan 0.1000 -0.0003
## 320 0.7977 nan 0.1000 -0.0002
## 340 0.7849 nan 0.1000 -0.0001
## 360 0.7740 nan 0.1000 -0.0001
## 380 0.7646 nan 0.1000 -0.0001
## 400 0.7550 nan 0.1000 -0.0002
## 420 0.7472 nan 0.1000 -0.0005
## 440 0.7394 nan 0.1000 -0.0001
## 460 0.7327 nan 0.1000 -0.0003
## 480 0.7228 nan 0.1000 -0.0003
## 500 0.7123 nan 0.1000 -0.0006
##
## - Fold01: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3380 nan 0.1000 0.0138
## 2 1.3116 nan 0.1000 0.0122
## 3 1.2903 nan 0.1000 0.0104
## 4 1.2734 nan 0.1000 0.0082
## 5 1.2579 nan 0.1000 0.0067
## 6 1.2453 nan 0.1000 0.0057
## 7 1.2325 nan 0.1000 0.0053
## 8 1.2218 nan 0.1000 0.0044
## 9 1.2100 nan 0.1000 0.0054
## 10 1.2007 nan 0.1000 0.0038
## 20 1.1314 nan 0.1000 0.0018
## 40 1.0328 nan 0.1000 0.0005
## 60 0.9772 nan 0.1000 0.0007
## 80 0.9351 nan 0.1000 0.0005
## 100 0.8998 nan 0.1000 0.0005
## 120 0.8593 nan 0.1000 0.0008
## 140 0.8313 nan 0.1000 0.0001
## 160 0.8089 nan 0.1000 0.0002
## 180 0.7873 nan 0.1000 -0.0000
## 200 0.7664 nan 0.1000 0.0003
## 220 0.7492 nan 0.1000 0.0003
## 240 0.7306 nan 0.1000 -0.0001
## 260 0.7135 nan 0.1000 -0.0003
## 280 0.6965 nan 0.1000 0.0004
## 300 0.6813 nan 0.1000 -0.0004
## 320 0.6665 nan 0.1000 0.0004
## 340 0.6542 nan 0.1000 -0.0001
## 360 0.6416 nan 0.1000 -0.0001
## 380 0.6300 nan 0.1000 -0.0002
## 400 0.6180 nan 0.1000 -0.0004
## 420 0.6064 nan 0.1000 -0.0003
## 440 0.5954 nan 0.1000 -0.0001
## 460 0.5853 nan 0.1000 -0.0002
## 480 0.5751 nan 0.1000 -0.0000
## 500 0.5658 nan 0.1000 -0.0003
##
## - Fold01: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3377 nan 0.1000 0.0143
## 2 1.3162 nan 0.1000 0.0096
## 3 1.2953 nan 0.1000 0.0094
## 4 1.2776 nan 0.1000 0.0076
## 5 1.2587 nan 0.1000 0.0089
## 6 1.2434 nan 0.1000 0.0064
## 7 1.2292 nan 0.1000 0.0060
## 8 1.2185 nan 0.1000 0.0044
## 9 1.2060 nan 0.1000 0.0048
## 10 1.1972 nan 0.1000 0.0036
## 20 1.1107 nan 0.1000 0.0038
## 40 1.0144 nan 0.1000 0.0001
## 60 0.9394 nan 0.1000 0.0005
## 80 0.8899 nan 0.1000 -0.0002
## 100 0.8533 nan 0.1000 -0.0000
## 120 0.8135 nan 0.1000 0.0007
## 140 0.7786 nan 0.1000 -0.0000
## 160 0.7458 nan 0.1000 0.0002
## 180 0.7211 nan 0.1000 -0.0004
## 200 0.6975 nan 0.1000 -0.0003
## 220 0.6740 nan 0.1000 -0.0004
## 240 0.6522 nan 0.1000 -0.0001
## 260 0.6312 nan 0.1000 -0.0001
## 280 0.6134 nan 0.1000 -0.0001
## 300 0.5964 nan 0.1000 -0.0006
## 320 0.5798 nan 0.1000 0.0001
## 340 0.5630 nan 0.1000 -0.0001
## 360 0.5484 nan 0.1000 -0.0004
## 380 0.5355 nan 0.1000 -0.0003
## 400 0.5221 nan 0.1000 -0.0003
## 420 0.5087 nan 0.1000 -0.0002
## 440 0.4971 nan 0.1000 -0.0002
## 460 0.4862 nan 0.1000 -0.0003
## 480 0.4738 nan 0.1000 -0.0001
## 500 0.4629 nan 0.1000 -0.0003
##
## - Fold01: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3296 nan 0.1000 0.0160
## 2 1.3005 nan 0.1000 0.0135
## 3 1.2757 nan 0.1000 0.0117
## 4 1.2525 nan 0.1000 0.0098
## 5 1.2321 nan 0.1000 0.0090
## 6 1.2157 nan 0.1000 0.0071
## 7 1.1982 nan 0.1000 0.0073
## 8 1.1848 nan 0.1000 0.0051
## 9 1.1720 nan 0.1000 0.0050
## 10 1.1610 nan 0.1000 0.0047
## 20 1.0702 nan 0.1000 0.0026
## 40 0.9570 nan 0.1000 0.0008
## 60 0.8897 nan 0.1000 0.0005
## 80 0.8274 nan 0.1000 0.0007
## 100 0.7833 nan 0.1000 0.0000
## 120 0.7424 nan 0.1000 0.0001
## 140 0.7084 nan 0.1000 0.0000
## 160 0.6797 nan 0.1000 -0.0003
## 180 0.6533 nan 0.1000 0.0006
## 200 0.6281 nan 0.1000 0.0002
## 220 0.6031 nan 0.1000 0.0001
## 240 0.5830 nan 0.1000 -0.0005
## 260 0.5593 nan 0.1000 -0.0002
## 280 0.5377 nan 0.1000 -0.0004
## 300 0.5215 nan 0.1000 -0.0002
## 320 0.5052 nan 0.1000 -0.0005
## 340 0.4877 nan 0.1000 -0.0004
## 360 0.4730 nan 0.1000 -0.0007
## 380 0.4591 nan 0.1000 -0.0002
## 400 0.4452 nan 0.1000 -0.0003
## 420 0.4305 nan 0.1000 -0.0003
## 440 0.4177 nan 0.1000 -0.0004
## 460 0.4056 nan 0.1000 0.0002
## 480 0.3946 nan 0.1000 -0.0000
## 500 0.3830 nan 0.1000 -0.0003
##
## - Fold01: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3332 nan 0.1000 0.0162
## 2 1.3053 nan 0.1000 0.0133
## 3 1.2797 nan 0.1000 0.0111
## 4 1.2566 nan 0.1000 0.0097
## 5 1.2380 nan 0.1000 0.0079
## 6 1.2210 nan 0.1000 0.0070
## 7 1.2023 nan 0.1000 0.0089
## 8 1.1891 nan 0.1000 0.0051
## 9 1.1748 nan 0.1000 0.0058
## 10 1.1570 nan 0.1000 0.0069
## 20 1.0579 nan 0.1000 0.0026
## 40 0.9298 nan 0.1000 0.0015
## 60 0.8563 nan 0.1000 0.0007
## 80 0.7940 nan 0.1000 0.0012
## 100 0.7510 nan 0.1000 0.0004
## 120 0.7038 nan 0.1000 0.0002
## 140 0.6645 nan 0.1000 0.0003
## 160 0.6306 nan 0.1000 -0.0001
## 180 0.6010 nan 0.1000 -0.0005
## 200 0.5704 nan 0.1000 0.0001
## 220 0.5476 nan 0.1000 -0.0003
## 240 0.5246 nan 0.1000 -0.0004
## 260 0.5036 nan 0.1000 -0.0002
## 280 0.4837 nan 0.1000 -0.0004
## 300 0.4661 nan 0.1000 -0.0005
## 320 0.4473 nan 0.1000 0.0001
## 340 0.4285 nan 0.1000 -0.0001
## 360 0.4127 nan 0.1000 -0.0008
## 380 0.3977 nan 0.1000 -0.0005
## 400 0.3826 nan 0.1000 -0.0002
## 420 0.3681 nan 0.1000 -0.0001
## 440 0.3555 nan 0.1000 -0.0003
## 460 0.3415 nan 0.1000 -0.0002
## 480 0.3289 nan 0.1000 -0.0001
## 500 0.3166 nan 0.1000 -0.0002
##
## - Fold01: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3316 nan 0.1000 0.0161
## 2 1.2980 nan 0.1000 0.0142
## 3 1.2699 nan 0.1000 0.0116
## 4 1.2448 nan 0.1000 0.0126
## 5 1.2250 nan 0.1000 0.0081
## 6 1.2066 nan 0.1000 0.0075
## 7 1.1912 nan 0.1000 0.0067
## 8 1.1751 nan 0.1000 0.0070
## 9 1.1629 nan 0.1000 0.0043
## 10 1.1504 nan 0.1000 0.0046
## 20 1.0521 nan 0.1000 0.0017
## 40 0.9216 nan 0.1000 0.0000
## 60 0.8371 nan 0.1000 0.0003
## 80 0.7705 nan 0.1000 0.0003
## 100 0.7174 nan 0.1000 -0.0000
## 120 0.6724 nan 0.1000 0.0001
## 140 0.6310 nan 0.1000 0.0002
## 160 0.5945 nan 0.1000 0.0002
## 180 0.5599 nan 0.1000 -0.0003
## 200 0.5316 nan 0.1000 -0.0004
## 220 0.5092 nan 0.1000 -0.0001
## 240 0.4835 nan 0.1000 -0.0002
## 260 0.4621 nan 0.1000 -0.0007
## 280 0.4410 nan 0.1000 -0.0000
## 300 0.4197 nan 0.1000 -0.0005
## 320 0.3996 nan 0.1000 -0.0002
## 340 0.3812 nan 0.1000 -0.0005
## 360 0.3656 nan 0.1000 -0.0004
## 380 0.3506 nan 0.1000 -0.0002
## 400 0.3360 nan 0.1000 -0.0004
## 420 0.3229 nan 0.1000 -0.0004
## 440 0.3102 nan 0.1000 -0.0004
## 460 0.2984 nan 0.1000 -0.0003
## 480 0.2868 nan 0.1000 -0.0003
## 500 0.2758 nan 0.1000 -0.0003
##
## - Fold01: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3269 nan 0.1000 0.0176
## 2 1.2924 nan 0.1000 0.0164
## 3 1.2614 nan 0.1000 0.0133
## 4 1.2366 nan 0.1000 0.0108
## 5 1.2135 nan 0.1000 0.0100
## 6 1.1929 nan 0.1000 0.0087
## 7 1.1729 nan 0.1000 0.0082
## 8 1.1565 nan 0.1000 0.0055
## 9 1.1389 nan 0.1000 0.0074
## 10 1.1241 nan 0.1000 0.0066
## 20 1.0105 nan 0.1000 0.0030
## 40 0.8658 nan 0.1000 0.0011
## 60 0.7846 nan 0.1000 -0.0001
## 80 0.7142 nan 0.1000 -0.0001
## 100 0.6609 nan 0.1000 -0.0001
## 120 0.6136 nan 0.1000 -0.0000
## 140 0.5722 nan 0.1000 0.0000
## 160 0.5323 nan 0.1000 0.0000
## 180 0.4997 nan 0.1000 -0.0006
## 200 0.4727 nan 0.1000 -0.0004
## 220 0.4487 nan 0.1000 -0.0003
## 240 0.4247 nan 0.1000 -0.0002
## 260 0.4021 nan 0.1000 -0.0003
## 280 0.3809 nan 0.1000 -0.0002
## 300 0.3628 nan 0.1000 -0.0004
## 320 0.3422 nan 0.1000 -0.0003
## 340 0.3237 nan 0.1000 -0.0004
## 360 0.3080 nan 0.1000 -0.0002
## 380 0.2937 nan 0.1000 -0.0003
## 400 0.2794 nan 0.1000 -0.0004
## 420 0.2667 nan 0.1000 -0.0004
## 440 0.2529 nan 0.1000 -0.0002
## 460 0.2414 nan 0.1000 -0.0001
## 480 0.2305 nan 0.1000 -0.0001
## 500 0.2204 nan 0.1000 -0.0003
##
## - Fold01: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3285 nan 0.1000 0.0149
## 2 1.2973 nan 0.1000 0.0128
## 3 1.2659 nan 0.1000 0.0143
## 4 1.2388 nan 0.1000 0.0111
## 5 1.2181 nan 0.1000 0.0080
## 6 1.1962 nan 0.1000 0.0078
## 7 1.1766 nan 0.1000 0.0088
## 8 1.1588 nan 0.1000 0.0067
## 9 1.1451 nan 0.1000 0.0050
## 10 1.1303 nan 0.1000 0.0055
## 20 1.0170 nan 0.1000 0.0015
## 40 0.8681 nan 0.1000 0.0014
## 60 0.7737 nan 0.1000 0.0011
## 80 0.7016 nan 0.1000 -0.0001
## 100 0.6460 nan 0.1000 -0.0005
## 120 0.5923 nan 0.1000 0.0004
## 140 0.5488 nan 0.1000 -0.0004
## 160 0.5099 nan 0.1000 -0.0001
## 180 0.4748 nan 0.1000 -0.0001
## 200 0.4452 nan 0.1000 -0.0004
## 220 0.4166 nan 0.1000 -0.0004
## 240 0.3924 nan 0.1000 -0.0004
## 260 0.3707 nan 0.1000 0.0001
## 280 0.3498 nan 0.1000 -0.0005
## 300 0.3286 nan 0.1000 -0.0001
## 320 0.3091 nan 0.1000 -0.0001
## 340 0.2924 nan 0.1000 -0.0002
## 360 0.2781 nan 0.1000 -0.0002
## 380 0.2629 nan 0.1000 -0.0003
## 400 0.2487 nan 0.1000 -0.0003
## 420 0.2354 nan 0.1000 -0.0003
## 440 0.2234 nan 0.1000 -0.0001
## 460 0.2116 nan 0.1000 -0.0001
## 480 0.2007 nan 0.1000 -0.0001
## 500 0.1893 nan 0.1000 -0.0001
##
## - Fold01: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold01: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3267 nan 0.1000 0.0179
## 2 1.2942 nan 0.1000 0.0151
## 3 1.2679 nan 0.1000 0.0111
## 4 1.2440 nan 0.1000 0.0094
## 5 1.2169 nan 0.1000 0.0126
## 6 1.1959 nan 0.1000 0.0083
## 7 1.1763 nan 0.1000 0.0082
## 8 1.1550 nan 0.1000 0.0091
## 9 1.1350 nan 0.1000 0.0083
## 10 1.1199 nan 0.1000 0.0056
## 20 0.9960 nan 0.1000 0.0034
## 40 0.8368 nan 0.1000 0.0025
## 60 0.7353 nan 0.1000 0.0009
## 80 0.6635 nan 0.1000 -0.0001
## 100 0.6071 nan 0.1000 0.0002
## 120 0.5564 nan 0.1000 -0.0007
## 140 0.5153 nan 0.1000 -0.0005
## 160 0.4808 nan 0.1000 -0.0005
## 180 0.4452 nan 0.1000 -0.0005
## 200 0.4135 nan 0.1000 -0.0003
## 220 0.3853 nan 0.1000 -0.0003
## 240 0.3578 nan 0.1000 0.0002
## 260 0.3365 nan 0.1000 -0.0005
## 280 0.3137 nan 0.1000 -0.0003
## 300 0.2945 nan 0.1000 -0.0005
## 320 0.2756 nan 0.1000 -0.0002
## 340 0.2583 nan 0.1000 -0.0002
## 360 0.2435 nan 0.1000 -0.0002
## 380 0.2300 nan 0.1000 -0.0002
## 400 0.2173 nan 0.1000 -0.0002
## 420 0.2055 nan 0.1000 -0.0004
## 440 0.1937 nan 0.1000 -0.0001
## 460 0.1825 nan 0.1000 -0.0002
## 480 0.1721 nan 0.1000 -0.0003
## 500 0.1620 nan 0.1000 -0.0002
##
## - Fold01: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3499 nan 0.1000 0.0080
## 2 1.3351 nan 0.1000 0.0067
## 3 1.3240 nan 0.1000 0.0057
## 4 1.3150 nan 0.1000 0.0036
## 5 1.3061 nan 0.1000 0.0045
## 6 1.2977 nan 0.1000 0.0042
## 7 1.2920 nan 0.1000 0.0025
## 8 1.2847 nan 0.1000 0.0037
## 9 1.2783 nan 0.1000 0.0027
## 10 1.2730 nan 0.1000 0.0025
## 20 1.2332 nan 0.1000 0.0012
## 40 1.1918 nan 0.1000 0.0004
## 60 1.1656 nan 0.1000 -0.0001
## 80 1.1467 nan 0.1000 -0.0001
## 100 1.1335 nan 0.1000 0.0002
## 120 1.1217 nan 0.1000 -0.0002
## 140 1.1112 nan 0.1000 0.0000
## 160 1.1023 nan 0.1000 -0.0001
## 180 1.0934 nan 0.1000 -0.0001
## 200 1.0848 nan 0.1000 -0.0001
## 220 1.0786 nan 0.1000 -0.0002
## 240 1.0715 nan 0.1000 0.0001
## 260 1.0647 nan 0.1000 0.0001
## 280 1.0583 nan 0.1000 -0.0001
## 300 1.0524 nan 0.1000 -0.0004
## 320 1.0460 nan 0.1000 -0.0001
## 340 1.0405 nan 0.1000 -0.0001
## 360 1.0347 nan 0.1000 -0.0002
## 380 1.0292 nan 0.1000 -0.0004
## 400 1.0242 nan 0.1000 -0.0003
## 420 1.0192 nan 0.1000 -0.0002
## 440 1.0143 nan 0.1000 -0.0003
## 460 1.0090 nan 0.1000 -0.0001
## 480 1.0043 nan 0.1000 -0.0003
## 500 0.9995 nan 0.1000 -0.0003
##
## - Fold02: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3430 nan 0.1000 0.0115
## 2 1.3271 nan 0.1000 0.0079
## 3 1.3104 nan 0.1000 0.0069
## 4 1.2951 nan 0.1000 0.0072
## 5 1.2822 nan 0.1000 0.0061
## 6 1.2703 nan 0.1000 0.0052
## 7 1.2594 nan 0.1000 0.0044
## 8 1.2505 nan 0.1000 0.0037
## 9 1.2427 nan 0.1000 0.0037
## 10 1.2351 nan 0.1000 0.0034
## 20 1.1715 nan 0.1000 0.0028
## 40 1.1061 nan 0.1000 0.0001
## 60 1.0526 nan 0.1000 0.0014
## 80 1.0093 nan 0.1000 0.0015
## 100 0.9803 nan 0.1000 0.0011
## 120 0.9525 nan 0.1000 -0.0004
## 140 0.9335 nan 0.1000 -0.0001
## 160 0.9168 nan 0.1000 0.0000
## 180 0.9005 nan 0.1000 -0.0002
## 200 0.8793 nan 0.1000 -0.0003
## 220 0.8662 nan 0.1000 -0.0006
## 240 0.8468 nan 0.1000 -0.0002
## 260 0.8313 nan 0.1000 -0.0006
## 280 0.8158 nan 0.1000 -0.0002
## 300 0.8028 nan 0.1000 0.0002
## 320 0.7917 nan 0.1000 0.0002
## 340 0.7783 nan 0.1000 -0.0002
## 360 0.7700 nan 0.1000 -0.0002
## 380 0.7593 nan 0.1000 -0.0002
## 400 0.7503 nan 0.1000 -0.0005
## 420 0.7422 nan 0.1000 -0.0001
## 440 0.7366 nan 0.1000 -0.0002
## 460 0.7276 nan 0.1000 -0.0002
## 480 0.7189 nan 0.1000 -0.0001
## 500 0.7108 nan 0.1000 -0.0006
##
## - Fold02: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3395 nan 0.1000 0.0123
## 2 1.3161 nan 0.1000 0.0102
## 3 1.2966 nan 0.1000 0.0081
## 4 1.2788 nan 0.1000 0.0088
## 5 1.2629 nan 0.1000 0.0072
## 6 1.2471 nan 0.1000 0.0069
## 7 1.2338 nan 0.1000 0.0057
## 8 1.2195 nan 0.1000 0.0072
## 9 1.2082 nan 0.1000 0.0048
## 10 1.1983 nan 0.1000 0.0044
## 20 1.1277 nan 0.1000 0.0015
## 40 1.0461 nan 0.1000 0.0010
## 60 0.9941 nan 0.1000 0.0001
## 80 0.9469 nan 0.1000 0.0006
## 100 0.9122 nan 0.1000 -0.0001
## 120 0.8771 nan 0.1000 -0.0002
## 140 0.8444 nan 0.1000 -0.0001
## 160 0.8155 nan 0.1000 0.0002
## 180 0.7909 nan 0.1000 -0.0004
## 200 0.7690 nan 0.1000 0.0008
## 220 0.7435 nan 0.1000 -0.0002
## 240 0.7252 nan 0.1000 -0.0001
## 260 0.7061 nan 0.1000 -0.0001
## 280 0.6896 nan 0.1000 -0.0001
## 300 0.6744 nan 0.1000 -0.0001
## 320 0.6614 nan 0.1000 0.0000
## 340 0.6450 nan 0.1000 -0.0004
## 360 0.6322 nan 0.1000 -0.0002
## 380 0.6188 nan 0.1000 0.0003
## 400 0.6062 nan 0.1000 -0.0005
## 420 0.5946 nan 0.1000 -0.0006
## 440 0.5842 nan 0.1000 0.0000
## 460 0.5741 nan 0.1000 0.0002
## 480 0.5621 nan 0.1000 0.0001
## 500 0.5509 nan 0.1000 -0.0001
##
## - Fold02: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3400 nan 0.1000 0.0121
## 2 1.3153 nan 0.1000 0.0111
## 3 1.2939 nan 0.1000 0.0099
## 4 1.2777 nan 0.1000 0.0079
## 5 1.2605 nan 0.1000 0.0081
## 6 1.2457 nan 0.1000 0.0057
## 7 1.2339 nan 0.1000 0.0046
## 8 1.2187 nan 0.1000 0.0078
## 9 1.2065 nan 0.1000 0.0048
## 10 1.1962 nan 0.1000 0.0037
## 20 1.1213 nan 0.1000 0.0027
## 40 1.0229 nan 0.1000 0.0012
## 60 0.9545 nan 0.1000 -0.0000
## 80 0.8997 nan 0.1000 -0.0003
## 100 0.8535 nan 0.1000 0.0003
## 120 0.8122 nan 0.1000 0.0002
## 140 0.7782 nan 0.1000 -0.0000
## 160 0.7512 nan 0.1000 -0.0001
## 180 0.7248 nan 0.1000 -0.0003
## 200 0.6943 nan 0.1000 -0.0000
## 220 0.6706 nan 0.1000 -0.0004
## 240 0.6515 nan 0.1000 -0.0002
## 260 0.6300 nan 0.1000 -0.0006
## 280 0.6120 nan 0.1000 -0.0004
## 300 0.5938 nan 0.1000 0.0003
## 320 0.5764 nan 0.1000 0.0001
## 340 0.5605 nan 0.1000 -0.0002
## 360 0.5459 nan 0.1000 -0.0003
## 380 0.5323 nan 0.1000 -0.0002
## 400 0.5197 nan 0.1000 -0.0004
## 420 0.5057 nan 0.1000 -0.0001
## 440 0.4934 nan 0.1000 -0.0004
## 460 0.4840 nan 0.1000 -0.0001
## 480 0.4734 nan 0.1000 -0.0002
## 500 0.4624 nan 0.1000 -0.0004
##
## - Fold02: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3368 nan 0.1000 0.0131
## 2 1.3115 nan 0.1000 0.0117
## 3 1.2915 nan 0.1000 0.0086
## 4 1.2695 nan 0.1000 0.0097
## 5 1.2526 nan 0.1000 0.0071
## 6 1.2363 nan 0.1000 0.0067
## 7 1.2222 nan 0.1000 0.0054
## 8 1.2095 nan 0.1000 0.0050
## 9 1.1971 nan 0.1000 0.0053
## 10 1.1867 nan 0.1000 0.0032
## 20 1.0996 nan 0.1000 0.0030
## 40 0.9935 nan 0.1000 0.0016
## 60 0.9205 nan 0.1000 0.0001
## 80 0.8552 nan 0.1000 0.0005
## 100 0.8070 nan 0.1000 -0.0002
## 120 0.7652 nan 0.1000 0.0008
## 140 0.7283 nan 0.1000 0.0004
## 160 0.6916 nan 0.1000 0.0004
## 180 0.6602 nan 0.1000 -0.0002
## 200 0.6307 nan 0.1000 -0.0004
## 220 0.6058 nan 0.1000 0.0001
## 240 0.5845 nan 0.1000 -0.0001
## 260 0.5627 nan 0.1000 -0.0003
## 280 0.5432 nan 0.1000 -0.0003
## 300 0.5273 nan 0.1000 -0.0004
## 320 0.5082 nan 0.1000 -0.0000
## 340 0.4903 nan 0.1000 -0.0001
## 360 0.4745 nan 0.1000 -0.0003
## 380 0.4580 nan 0.1000 -0.0003
## 400 0.4444 nan 0.1000 -0.0000
## 420 0.4310 nan 0.1000 -0.0002
## 440 0.4171 nan 0.1000 -0.0001
## 460 0.4046 nan 0.1000 -0.0002
## 480 0.3934 nan 0.1000 -0.0003
## 500 0.3816 nan 0.1000 -0.0001
##
## - Fold02: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3318 nan 0.1000 0.0158
## 2 1.3055 nan 0.1000 0.0124
## 3 1.2800 nan 0.1000 0.0120
## 4 1.2608 nan 0.1000 0.0079
## 5 1.2426 nan 0.1000 0.0074
## 6 1.2219 nan 0.1000 0.0094
## 7 1.2071 nan 0.1000 0.0067
## 8 1.1940 nan 0.1000 0.0049
## 9 1.1812 nan 0.1000 0.0052
## 10 1.1689 nan 0.1000 0.0051
## 20 1.0767 nan 0.1000 0.0020
## 40 0.9411 nan 0.1000 0.0006
## 60 0.8567 nan 0.1000 0.0006
## 80 0.7925 nan 0.1000 0.0002
## 100 0.7444 nan 0.1000 0.0010
## 120 0.7014 nan 0.1000 0.0008
## 140 0.6624 nan 0.1000 0.0008
## 160 0.6311 nan 0.1000 -0.0003
## 180 0.6008 nan 0.1000 0.0000
## 200 0.5671 nan 0.1000 0.0002
## 220 0.5427 nan 0.1000 -0.0001
## 240 0.5191 nan 0.1000 0.0000
## 260 0.4969 nan 0.1000 -0.0000
## 280 0.4786 nan 0.1000 -0.0006
## 300 0.4576 nan 0.1000 0.0001
## 320 0.4390 nan 0.1000 -0.0003
## 340 0.4215 nan 0.1000 -0.0003
## 360 0.4056 nan 0.1000 -0.0004
## 380 0.3901 nan 0.1000 -0.0005
## 400 0.3734 nan 0.1000 -0.0003
## 420 0.3601 nan 0.1000 -0.0005
## 440 0.3456 nan 0.1000 -0.0002
## 460 0.3323 nan 0.1000 -0.0004
## 480 0.3195 nan 0.1000 -0.0002
## 500 0.3083 nan 0.1000 -0.0002
##
## - Fold02: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3325 nan 0.1000 0.0147
## 2 1.2999 nan 0.1000 0.0137
## 3 1.2737 nan 0.1000 0.0110
## 4 1.2521 nan 0.1000 0.0099
## 5 1.2314 nan 0.1000 0.0090
## 6 1.2128 nan 0.1000 0.0069
## 7 1.1970 nan 0.1000 0.0064
## 8 1.1818 nan 0.1000 0.0066
## 9 1.1655 nan 0.1000 0.0066
## 10 1.1542 nan 0.1000 0.0045
## 20 1.0513 nan 0.1000 0.0016
## 40 0.9292 nan 0.1000 0.0015
## 60 0.8515 nan 0.1000 0.0001
## 80 0.7849 nan 0.1000 0.0014
## 100 0.7218 nan 0.1000 0.0001
## 120 0.6736 nan 0.1000 0.0000
## 140 0.6331 nan 0.1000 0.0003
## 160 0.5964 nan 0.1000 -0.0003
## 180 0.5655 nan 0.1000 -0.0003
## 200 0.5368 nan 0.1000 -0.0003
## 220 0.5063 nan 0.1000 -0.0003
## 240 0.4827 nan 0.1000 -0.0004
## 260 0.4610 nan 0.1000 -0.0002
## 280 0.4359 nan 0.1000 0.0002
## 300 0.4172 nan 0.1000 -0.0000
## 320 0.3972 nan 0.1000 -0.0003
## 340 0.3796 nan 0.1000 -0.0003
## 360 0.3625 nan 0.1000 -0.0001
## 380 0.3472 nan 0.1000 0.0000
## 400 0.3332 nan 0.1000 -0.0006
## 420 0.3183 nan 0.1000 -0.0002
## 440 0.3041 nan 0.1000 -0.0001
## 460 0.2916 nan 0.1000 -0.0004
## 480 0.2804 nan 0.1000 -0.0000
## 500 0.2688 nan 0.1000 -0.0002
##
## - Fold02: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3306 nan 0.1000 0.0161
## 2 1.3010 nan 0.1000 0.0128
## 3 1.2780 nan 0.1000 0.0098
## 4 1.2557 nan 0.1000 0.0087
## 5 1.2349 nan 0.1000 0.0087
## 6 1.2171 nan 0.1000 0.0067
## 7 1.1991 nan 0.1000 0.0066
## 8 1.1820 nan 0.1000 0.0066
## 9 1.1650 nan 0.1000 0.0070
## 10 1.1518 nan 0.1000 0.0048
## 20 1.0392 nan 0.1000 0.0031
## 40 0.9021 nan 0.1000 0.0024
## 60 0.8123 nan 0.1000 0.0012
## 80 0.7459 nan 0.1000 -0.0001
## 100 0.6916 nan 0.1000 -0.0003
## 120 0.6444 nan 0.1000 -0.0000
## 140 0.5976 nan 0.1000 -0.0003
## 160 0.5566 nan 0.1000 -0.0003
## 180 0.5229 nan 0.1000 0.0001
## 200 0.4941 nan 0.1000 0.0001
## 220 0.4689 nan 0.1000 -0.0005
## 240 0.4433 nan 0.1000 0.0002
## 260 0.4202 nan 0.1000 -0.0004
## 280 0.3965 nan 0.1000 -0.0003
## 300 0.3751 nan 0.1000 -0.0003
## 320 0.3557 nan 0.1000 -0.0001
## 340 0.3375 nan 0.1000 -0.0003
## 360 0.3199 nan 0.1000 -0.0003
## 380 0.3048 nan 0.1000 -0.0004
## 400 0.2907 nan 0.1000 -0.0004
## 420 0.2770 nan 0.1000 -0.0004
## 440 0.2629 nan 0.1000 -0.0000
## 460 0.2507 nan 0.1000 0.0001
## 480 0.2400 nan 0.1000 -0.0002
## 500 0.2287 nan 0.1000 -0.0003
##
## - Fold02: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3242 nan 0.1000 0.0196
## 2 1.2907 nan 0.1000 0.0156
## 3 1.2646 nan 0.1000 0.0119
## 4 1.2368 nan 0.1000 0.0117
## 5 1.2145 nan 0.1000 0.0089
## 6 1.1928 nan 0.1000 0.0085
## 7 1.1724 nan 0.1000 0.0073
## 8 1.1547 nan 0.1000 0.0069
## 9 1.1394 nan 0.1000 0.0060
## 10 1.1238 nan 0.1000 0.0062
## 20 1.0152 nan 0.1000 0.0018
## 40 0.8683 nan 0.1000 0.0002
## 60 0.7686 nan 0.1000 0.0016
## 80 0.6979 nan 0.1000 0.0008
## 100 0.6387 nan 0.1000 0.0010
## 120 0.5884 nan 0.1000 0.0003
## 140 0.5483 nan 0.1000 -0.0006
## 160 0.5081 nan 0.1000 -0.0009
## 180 0.4748 nan 0.1000 -0.0005
## 200 0.4406 nan 0.1000 -0.0001
## 220 0.4164 nan 0.1000 -0.0004
## 240 0.3905 nan 0.1000 -0.0002
## 260 0.3690 nan 0.1000 0.0001
## 280 0.3478 nan 0.1000 0.0000
## 300 0.3288 nan 0.1000 -0.0003
## 320 0.3102 nan 0.1000 -0.0000
## 340 0.2939 nan 0.1000 -0.0005
## 360 0.2769 nan 0.1000 -0.0002
## 380 0.2619 nan 0.1000 -0.0000
## 400 0.2478 nan 0.1000 -0.0001
## 420 0.2346 nan 0.1000 -0.0001
## 440 0.2227 nan 0.1000 -0.0001
## 460 0.2117 nan 0.1000 -0.0003
## 480 0.2005 nan 0.1000 -0.0001
## 500 0.1906 nan 0.1000 -0.0001
##
## - Fold02: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold02: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3253 nan 0.1000 0.0196
## 2 1.2934 nan 0.1000 0.0131
## 3 1.2683 nan 0.1000 0.0100
## 4 1.2434 nan 0.1000 0.0098
## 5 1.2242 nan 0.1000 0.0079
## 6 1.2020 nan 0.1000 0.0097
## 7 1.1844 nan 0.1000 0.0061
## 8 1.1668 nan 0.1000 0.0061
## 9 1.1537 nan 0.1000 0.0039
## 10 1.1367 nan 0.1000 0.0072
## 20 1.0182 nan 0.1000 0.0026
## 40 0.8587 nan 0.1000 0.0013
## 60 0.7624 nan 0.1000 0.0008
## 80 0.6870 nan 0.1000 0.0003
## 100 0.6247 nan 0.1000 -0.0004
## 120 0.5741 nan 0.1000 0.0002
## 140 0.5272 nan 0.1000 0.0004
## 160 0.4874 nan 0.1000 -0.0008
## 180 0.4548 nan 0.1000 -0.0003
## 200 0.4237 nan 0.1000 -0.0006
## 220 0.3953 nan 0.1000 -0.0001
## 240 0.3702 nan 0.1000 -0.0003
## 260 0.3465 nan 0.1000 -0.0002
## 280 0.3265 nan 0.1000 -0.0005
## 300 0.3056 nan 0.1000 -0.0005
## 320 0.2860 nan 0.1000 -0.0004
## 340 0.2689 nan 0.1000 -0.0006
## 360 0.2539 nan 0.1000 -0.0001
## 380 0.2379 nan 0.1000 -0.0003
## 400 0.2255 nan 0.1000 -0.0001
## 420 0.2126 nan 0.1000 -0.0002
## 440 0.2011 nan 0.1000 -0.0002
## 460 0.1893 nan 0.1000 -0.0004
## 480 0.1792 nan 0.1000 -0.0002
## 500 0.1689 nan 0.1000 -0.0001
##
## - Fold02: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3507 nan 0.1000 0.0071
## 2 1.3384 nan 0.1000 0.0058
## 3 1.3263 nan 0.1000 0.0056
## 4 1.3175 nan 0.1000 0.0045
## 5 1.3073 nan 0.1000 0.0043
## 6 1.2991 nan 0.1000 0.0040
## 7 1.2910 nan 0.1000 0.0034
## 8 1.2820 nan 0.1000 0.0037
## 9 1.2751 nan 0.1000 0.0033
## 10 1.2693 nan 0.1000 0.0026
## 20 1.2214 nan 0.1000 0.0017
## 40 1.1716 nan 0.1000 0.0005
## 60 1.1414 nan 0.1000 0.0002
## 80 1.1194 nan 0.1000 0.0000
## 100 1.1038 nan 0.1000 0.0000
## 120 1.0915 nan 0.1000 0.0002
## 140 1.0813 nan 0.1000 -0.0003
## 160 1.0728 nan 0.1000 -0.0004
## 180 1.0639 nan 0.1000 -0.0000
## 200 1.0560 nan 0.1000 -0.0003
## 220 1.0486 nan 0.1000 -0.0003
## 240 1.0408 nan 0.1000 -0.0001
## 260 1.0340 nan 0.1000 0.0000
## 280 1.0273 nan 0.1000 -0.0002
## 300 1.0191 nan 0.1000 -0.0000
## 320 1.0131 nan 0.1000 -0.0000
## 340 1.0079 nan 0.1000 -0.0004
## 360 1.0014 nan 0.1000 -0.0002
## 380 0.9964 nan 0.1000 -0.0004
## 400 0.9915 nan 0.1000 -0.0002
## 420 0.9854 nan 0.1000 0.0000
## 440 0.9804 nan 0.1000 -0.0003
## 460 0.9755 nan 0.1000 -0.0002
## 480 0.9713 nan 0.1000 -0.0002
## 500 0.9661 nan 0.1000 -0.0001
##
## - Fold03: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3429 nan 0.1000 0.0107
## 2 1.3247 nan 0.1000 0.0086
## 3 1.3072 nan 0.1000 0.0076
## 4 1.2945 nan 0.1000 0.0061
## 5 1.2817 nan 0.1000 0.0057
## 6 1.2695 nan 0.1000 0.0052
## 7 1.2593 nan 0.1000 0.0037
## 8 1.2487 nan 0.1000 0.0038
## 9 1.2394 nan 0.1000 0.0040
## 10 1.2336 nan 0.1000 0.0021
## 20 1.1746 nan 0.1000 0.0018
## 40 1.1023 nan 0.1000 0.0007
## 60 1.0560 nan 0.1000 0.0002
## 80 1.0200 nan 0.1000 0.0002
## 100 0.9876 nan 0.1000 0.0001
## 120 0.9629 nan 0.1000 -0.0005
## 140 0.9404 nan 0.1000 0.0008
## 160 0.9174 nan 0.1000 -0.0004
## 180 0.8986 nan 0.1000 0.0000
## 200 0.8797 nan 0.1000 0.0000
## 220 0.8651 nan 0.1000 -0.0001
## 240 0.8497 nan 0.1000 0.0003
## 260 0.8362 nan 0.1000 -0.0005
## 280 0.8236 nan 0.1000 -0.0003
## 300 0.8058 nan 0.1000 -0.0003
## 320 0.7913 nan 0.1000 -0.0003
## 340 0.7792 nan 0.1000 -0.0003
## 360 0.7668 nan 0.1000 0.0002
## 380 0.7536 nan 0.1000 -0.0002
## 400 0.7443 nan 0.1000 -0.0003
## 420 0.7339 nan 0.1000 -0.0004
## 440 0.7266 nan 0.1000 -0.0002
## 460 0.7154 nan 0.1000 -0.0003
## 480 0.7051 nan 0.1000 -0.0002
## 500 0.6979 nan 0.1000 -0.0003
##
## - Fold03: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3399 nan 0.1000 0.0134
## 2 1.3165 nan 0.1000 0.0097
## 3 1.2987 nan 0.1000 0.0074
## 4 1.2821 nan 0.1000 0.0076
## 5 1.2677 nan 0.1000 0.0067
## 6 1.2525 nan 0.1000 0.0069
## 7 1.2401 nan 0.1000 0.0055
## 8 1.2287 nan 0.1000 0.0040
## 9 1.2196 nan 0.1000 0.0030
## 10 1.2094 nan 0.1000 0.0035
## 20 1.1350 nan 0.1000 0.0016
## 40 1.0481 nan 0.1000 0.0012
## 60 0.9958 nan 0.1000 0.0001
## 80 0.9480 nan 0.1000 0.0005
## 100 0.9128 nan 0.1000 0.0015
## 120 0.8794 nan 0.1000 -0.0003
## 140 0.8469 nan 0.1000 -0.0001
## 160 0.8184 nan 0.1000 0.0000
## 180 0.7950 nan 0.1000 0.0002
## 200 0.7750 nan 0.1000 0.0000
## 220 0.7540 nan 0.1000 -0.0001
## 240 0.7376 nan 0.1000 -0.0002
## 260 0.7190 nan 0.1000 -0.0005
## 280 0.7034 nan 0.1000 -0.0000
## 300 0.6885 nan 0.1000 0.0001
## 320 0.6758 nan 0.1000 -0.0000
## 340 0.6629 nan 0.1000 -0.0001
## 360 0.6500 nan 0.1000 0.0001
## 380 0.6361 nan 0.1000 0.0004
## 400 0.6237 nan 0.1000 -0.0002
## 420 0.6129 nan 0.1000 -0.0003
## 440 0.6029 nan 0.1000 -0.0003
## 460 0.5911 nan 0.1000 -0.0006
## 480 0.5792 nan 0.1000 -0.0002
## 500 0.5705 nan 0.1000 -0.0003
##
## - Fold03: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3358 nan 0.1000 0.0141
## 2 1.3113 nan 0.1000 0.0119
## 3 1.2917 nan 0.1000 0.0086
## 4 1.2728 nan 0.1000 0.0073
## 5 1.2575 nan 0.1000 0.0068
## 6 1.2423 nan 0.1000 0.0071
## 7 1.2300 nan 0.1000 0.0054
## 8 1.2190 nan 0.1000 0.0043
## 9 1.2077 nan 0.1000 0.0047
## 10 1.1968 nan 0.1000 0.0040
## 20 1.1161 nan 0.1000 0.0012
## 40 1.0159 nan 0.1000 0.0008
## 60 0.9481 nan 0.1000 0.0004
## 80 0.8995 nan 0.1000 0.0019
## 100 0.8639 nan 0.1000 -0.0001
## 120 0.8234 nan 0.1000 -0.0003
## 140 0.7929 nan 0.1000 0.0003
## 160 0.7636 nan 0.1000 0.0006
## 180 0.7362 nan 0.1000 -0.0001
## 200 0.7143 nan 0.1000 -0.0000
## 220 0.6914 nan 0.1000 -0.0005
## 240 0.6675 nan 0.1000 -0.0001
## 260 0.6498 nan 0.1000 -0.0004
## 280 0.6313 nan 0.1000 -0.0001
## 300 0.6135 nan 0.1000 -0.0000
## 320 0.6016 nan 0.1000 -0.0002
## 340 0.5861 nan 0.1000 -0.0003
## 360 0.5707 nan 0.1000 0.0002
## 380 0.5570 nan 0.1000 -0.0005
## 400 0.5426 nan 0.1000 -0.0004
## 420 0.5292 nan 0.1000 -0.0003
## 440 0.5166 nan 0.1000 -0.0005
## 460 0.5049 nan 0.1000 -0.0003
## 480 0.4930 nan 0.1000 -0.0005
## 500 0.4814 nan 0.1000 -0.0002
##
## - Fold03: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3347 nan 0.1000 0.0138
## 2 1.3052 nan 0.1000 0.0130
## 3 1.2816 nan 0.1000 0.0108
## 4 1.2625 nan 0.1000 0.0091
## 5 1.2436 nan 0.1000 0.0078
## 6 1.2280 nan 0.1000 0.0065
## 7 1.2131 nan 0.1000 0.0057
## 8 1.1990 nan 0.1000 0.0058
## 9 1.1880 nan 0.1000 0.0047
## 10 1.1782 nan 0.1000 0.0042
## 20 1.0904 nan 0.1000 0.0021
## 40 0.9638 nan 0.1000 0.0024
## 60 0.8946 nan 0.1000 0.0017
## 80 0.8337 nan 0.1000 0.0010
## 100 0.7897 nan 0.1000 -0.0004
## 120 0.7529 nan 0.1000 -0.0001
## 140 0.7216 nan 0.1000 0.0001
## 160 0.6890 nan 0.1000 -0.0002
## 180 0.6600 nan 0.1000 -0.0002
## 200 0.6332 nan 0.1000 0.0003
## 220 0.6085 nan 0.1000 -0.0002
## 240 0.5861 nan 0.1000 -0.0002
## 260 0.5633 nan 0.1000 -0.0000
## 280 0.5429 nan 0.1000 -0.0001
## 300 0.5245 nan 0.1000 0.0001
## 320 0.5072 nan 0.1000 -0.0003
## 340 0.4895 nan 0.1000 -0.0004
## 360 0.4715 nan 0.1000 0.0001
## 380 0.4540 nan 0.1000 -0.0005
## 400 0.4393 nan 0.1000 -0.0003
## 420 0.4254 nan 0.1000 -0.0002
## 440 0.4110 nan 0.1000 -0.0002
## 460 0.4004 nan 0.1000 -0.0003
## 480 0.3886 nan 0.1000 -0.0001
## 500 0.3775 nan 0.1000 -0.0005
##
## - Fold03: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3320 nan 0.1000 0.0154
## 2 1.3046 nan 0.1000 0.0121
## 3 1.2767 nan 0.1000 0.0123
## 4 1.2549 nan 0.1000 0.0107
## 5 1.2357 nan 0.1000 0.0085
## 6 1.2145 nan 0.1000 0.0093
## 7 1.1995 nan 0.1000 0.0055
## 8 1.1842 nan 0.1000 0.0057
## 9 1.1707 nan 0.1000 0.0044
## 10 1.1589 nan 0.1000 0.0039
## 20 1.0662 nan 0.1000 0.0040
## 40 0.9425 nan 0.1000 0.0009
## 60 0.8603 nan 0.1000 0.0008
## 80 0.7960 nan 0.1000 -0.0001
## 100 0.7468 nan 0.1000 0.0005
## 120 0.7019 nan 0.1000 -0.0001
## 140 0.6695 nan 0.1000 0.0000
## 160 0.6354 nan 0.1000 0.0001
## 180 0.6050 nan 0.1000 -0.0005
## 200 0.5765 nan 0.1000 -0.0007
## 220 0.5521 nan 0.1000 -0.0001
## 240 0.5288 nan 0.1000 -0.0006
## 260 0.5094 nan 0.1000 -0.0003
## 280 0.4904 nan 0.1000 0.0001
## 300 0.4713 nan 0.1000 -0.0006
## 320 0.4518 nan 0.1000 -0.0000
## 340 0.4341 nan 0.1000 -0.0006
## 360 0.4188 nan 0.1000 -0.0003
## 380 0.4028 nan 0.1000 -0.0001
## 400 0.3874 nan 0.1000 -0.0001
## 420 0.3734 nan 0.1000 -0.0003
## 440 0.3585 nan 0.1000 -0.0001
## 460 0.3458 nan 0.1000 -0.0001
## 480 0.3342 nan 0.1000 -0.0003
## 500 0.3217 nan 0.1000 -0.0003
##
## - Fold03: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3282 nan 0.1000 0.0170
## 2 1.2972 nan 0.1000 0.0144
## 3 1.2707 nan 0.1000 0.0114
## 4 1.2453 nan 0.1000 0.0112
## 5 1.2245 nan 0.1000 0.0087
## 6 1.2062 nan 0.1000 0.0079
## 7 1.1907 nan 0.1000 0.0057
## 8 1.1754 nan 0.1000 0.0061
## 9 1.1627 nan 0.1000 0.0034
## 10 1.1495 nan 0.1000 0.0052
## 20 1.0401 nan 0.1000 0.0028
## 40 0.9022 nan 0.1000 0.0012
## 60 0.8176 nan 0.1000 0.0003
## 80 0.7548 nan 0.1000 0.0008
## 100 0.6994 nan 0.1000 0.0000
## 120 0.6551 nan 0.1000 0.0001
## 140 0.6154 nan 0.1000 0.0001
## 160 0.5814 nan 0.1000 -0.0004
## 180 0.5512 nan 0.1000 -0.0004
## 200 0.5231 nan 0.1000 -0.0000
## 220 0.4982 nan 0.1000 -0.0001
## 240 0.4740 nan 0.1000 -0.0001
## 260 0.4506 nan 0.1000 -0.0005
## 280 0.4299 nan 0.1000 -0.0004
## 300 0.4091 nan 0.1000 -0.0005
## 320 0.3912 nan 0.1000 -0.0001
## 340 0.3750 nan 0.1000 -0.0004
## 360 0.3601 nan 0.1000 -0.0001
## 380 0.3446 nan 0.1000 -0.0002
## 400 0.3314 nan 0.1000 -0.0005
## 420 0.3157 nan 0.1000 -0.0001
## 440 0.3027 nan 0.1000 -0.0003
## 460 0.2895 nan 0.1000 -0.0002
## 480 0.2770 nan 0.1000 -0.0003
## 500 0.2664 nan 0.1000 -0.0002
##
## - Fold03: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3267 nan 0.1000 0.0178
## 2 1.2960 nan 0.1000 0.0123
## 3 1.2639 nan 0.1000 0.0146
## 4 1.2391 nan 0.1000 0.0113
## 5 1.2158 nan 0.1000 0.0097
## 6 1.1943 nan 0.1000 0.0081
## 7 1.1778 nan 0.1000 0.0060
## 8 1.1617 nan 0.1000 0.0063
## 9 1.1458 nan 0.1000 0.0068
## 10 1.1322 nan 0.1000 0.0048
## 20 1.0297 nan 0.1000 0.0015
## 40 0.8821 nan 0.1000 -0.0001
## 60 0.8016 nan 0.1000 0.0000
## 80 0.7297 nan 0.1000 -0.0006
## 100 0.6728 nan 0.1000 0.0001
## 120 0.6268 nan 0.1000 -0.0007
## 140 0.5870 nan 0.1000 -0.0007
## 160 0.5466 nan 0.1000 -0.0002
## 180 0.5107 nan 0.1000 0.0000
## 200 0.4808 nan 0.1000 -0.0007
## 220 0.4551 nan 0.1000 -0.0008
## 240 0.4301 nan 0.1000 -0.0002
## 260 0.4082 nan 0.1000 -0.0006
## 280 0.3860 nan 0.1000 -0.0002
## 300 0.3659 nan 0.1000 0.0000
## 320 0.3486 nan 0.1000 -0.0005
## 340 0.3304 nan 0.1000 -0.0002
## 360 0.3135 nan 0.1000 -0.0002
## 380 0.2984 nan 0.1000 -0.0001
## 400 0.2849 nan 0.1000 -0.0003
## 420 0.2711 nan 0.1000 -0.0004
## 440 0.2580 nan 0.1000 -0.0001
## 460 0.2466 nan 0.1000 -0.0004
## 480 0.2353 nan 0.1000 -0.0003
## 500 0.2249 nan 0.1000 -0.0004
##
## - Fold03: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3274 nan 0.1000 0.0182
## 2 1.2955 nan 0.1000 0.0145
## 3 1.2659 nan 0.1000 0.0125
## 4 1.2405 nan 0.1000 0.0097
## 5 1.2188 nan 0.1000 0.0106
## 6 1.1973 nan 0.1000 0.0091
## 7 1.1796 nan 0.1000 0.0072
## 8 1.1621 nan 0.1000 0.0070
## 9 1.1465 nan 0.1000 0.0056
## 10 1.1315 nan 0.1000 0.0064
## 20 1.0092 nan 0.1000 0.0047
## 40 0.8701 nan 0.1000 0.0019
## 60 0.7757 nan 0.1000 -0.0002
## 80 0.7109 nan 0.1000 -0.0004
## 100 0.6530 nan 0.1000 0.0002
## 120 0.6014 nan 0.1000 -0.0002
## 140 0.5541 nan 0.1000 -0.0002
## 160 0.5157 nan 0.1000 0.0001
## 180 0.4852 nan 0.1000 -0.0005
## 200 0.4522 nan 0.1000 -0.0006
## 220 0.4244 nan 0.1000 -0.0003
## 240 0.3986 nan 0.1000 -0.0003
## 260 0.3765 nan 0.1000 -0.0003
## 280 0.3545 nan 0.1000 -0.0003
## 300 0.3327 nan 0.1000 -0.0004
## 320 0.3137 nan 0.1000 -0.0003
## 340 0.2967 nan 0.1000 -0.0003
## 360 0.2817 nan 0.1000 -0.0005
## 380 0.2673 nan 0.1000 -0.0005
## 400 0.2524 nan 0.1000 -0.0002
## 420 0.2397 nan 0.1000 -0.0002
## 440 0.2279 nan 0.1000 -0.0002
## 460 0.2166 nan 0.1000 -0.0004
## 480 0.2045 nan 0.1000 -0.0004
## 500 0.1938 nan 0.1000 -0.0002
##
## - Fold03: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold03: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3259 nan 0.1000 0.0191
## 2 1.2922 nan 0.1000 0.0149
## 3 1.2658 nan 0.1000 0.0111
## 4 1.2369 nan 0.1000 0.0130
## 5 1.2161 nan 0.1000 0.0084
## 6 1.1935 nan 0.1000 0.0095
## 7 1.1731 nan 0.1000 0.0077
## 8 1.1554 nan 0.1000 0.0062
## 9 1.1374 nan 0.1000 0.0080
## 10 1.1210 nan 0.1000 0.0057
## 20 1.0061 nan 0.1000 0.0029
## 40 0.8519 nan 0.1000 0.0023
## 60 0.7531 nan 0.1000 0.0009
## 80 0.6777 nan 0.1000 0.0007
## 100 0.6194 nan 0.1000 -0.0003
## 120 0.5684 nan 0.1000 -0.0002
## 140 0.5213 nan 0.1000 -0.0004
## 160 0.4804 nan 0.1000 -0.0005
## 180 0.4459 nan 0.1000 -0.0001
## 200 0.4153 nan 0.1000 -0.0002
## 220 0.3869 nan 0.1000 -0.0001
## 240 0.3597 nan 0.1000 -0.0000
## 260 0.3346 nan 0.1000 -0.0002
## 280 0.3145 nan 0.1000 -0.0006
## 300 0.2955 nan 0.1000 -0.0004
## 320 0.2775 nan 0.1000 -0.0004
## 340 0.2588 nan 0.1000 -0.0004
## 360 0.2444 nan 0.1000 -0.0002
## 380 0.2290 nan 0.1000 -0.0003
## 400 0.2144 nan 0.1000 -0.0002
## 420 0.2026 nan 0.1000 -0.0002
## 440 0.1909 nan 0.1000 -0.0001
## 460 0.1798 nan 0.1000 -0.0003
## 480 0.1700 nan 0.1000 -0.0001
## 500 0.1606 nan 0.1000 -0.0002
##
## - Fold03: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3485 nan 0.1000 0.0083
## 2 1.3336 nan 0.1000 0.0070
## 3 1.3204 nan 0.1000 0.0062
## 4 1.3088 nan 0.1000 0.0051
## 5 1.2985 nan 0.1000 0.0050
## 6 1.2893 nan 0.1000 0.0039
## 7 1.2816 nan 0.1000 0.0040
## 8 1.2743 nan 0.1000 0.0030
## 9 1.2679 nan 0.1000 0.0023
## 10 1.2609 nan 0.1000 0.0028
## 20 1.2187 nan 0.1000 0.0012
## 40 1.1743 nan 0.1000 0.0001
## 60 1.1487 nan 0.1000 0.0003
## 80 1.1281 nan 0.1000 -0.0000
## 100 1.1129 nan 0.1000 0.0001
## 120 1.1001 nan 0.1000 -0.0002
## 140 1.0901 nan 0.1000 -0.0001
## 160 1.0796 nan 0.1000 0.0000
## 180 1.0709 nan 0.1000 -0.0000
## 200 1.0622 nan 0.1000 0.0000
## 220 1.0535 nan 0.1000 -0.0001
## 240 1.0463 nan 0.1000 -0.0003
## 260 1.0393 nan 0.1000 -0.0002
## 280 1.0323 nan 0.1000 -0.0002
## 300 1.0245 nan 0.1000 -0.0002
## 320 1.0178 nan 0.1000 -0.0002
## 340 1.0114 nan 0.1000 0.0000
## 360 1.0052 nan 0.1000 -0.0002
## 380 0.9993 nan 0.1000 -0.0001
## 400 0.9933 nan 0.1000 -0.0003
## 420 0.9878 nan 0.1000 -0.0002
## 440 0.9823 nan 0.1000 -0.0002
## 460 0.9775 nan 0.1000 -0.0002
## 480 0.9729 nan 0.1000 -0.0001
## 500 0.9685 nan 0.1000 -0.0001
##
## - Fold04: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3432 nan 0.1000 0.0093
## 2 1.3288 nan 0.1000 0.0065
## 3 1.3121 nan 0.1000 0.0070
## 4 1.2996 nan 0.1000 0.0060
## 5 1.2863 nan 0.1000 0.0057
## 6 1.2775 nan 0.1000 0.0041
## 7 1.2678 nan 0.1000 0.0039
## 8 1.2595 nan 0.1000 0.0034
## 9 1.2497 nan 0.1000 0.0042
## 10 1.2419 nan 0.1000 0.0035
## 20 1.1781 nan 0.1000 0.0033
## 40 1.1133 nan 0.1000 0.0025
## 60 1.0615 nan 0.1000 0.0007
## 80 1.0280 nan 0.1000 -0.0003
## 100 0.9970 nan 0.1000 0.0010
## 120 0.9671 nan 0.1000 0.0009
## 140 0.9452 nan 0.1000 -0.0002
## 160 0.9226 nan 0.1000 -0.0003
## 180 0.9021 nan 0.1000 -0.0003
## 200 0.8844 nan 0.1000 -0.0007
## 220 0.8680 nan 0.1000 0.0001
## 240 0.8540 nan 0.1000 -0.0006
## 260 0.8385 nan 0.1000 -0.0001
## 280 0.8251 nan 0.1000 -0.0001
## 300 0.8144 nan 0.1000 -0.0002
## 320 0.8025 nan 0.1000 -0.0005
## 340 0.7893 nan 0.1000 0.0000
## 360 0.7800 nan 0.1000 -0.0003
## 380 0.7689 nan 0.1000 -0.0002
## 400 0.7585 nan 0.1000 -0.0002
## 420 0.7489 nan 0.1000 0.0005
## 440 0.7381 nan 0.1000 -0.0004
## 460 0.7284 nan 0.1000 -0.0001
## 480 0.7180 nan 0.1000 -0.0001
## 500 0.7105 nan 0.1000 -0.0007
##
## - Fold04: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3365 nan 0.1000 0.0132
## 2 1.3158 nan 0.1000 0.0099
## 3 1.2940 nan 0.1000 0.0105
## 4 1.2775 nan 0.1000 0.0077
## 5 1.2657 nan 0.1000 0.0044
## 6 1.2528 nan 0.1000 0.0056
## 7 1.2390 nan 0.1000 0.0062
## 8 1.2265 nan 0.1000 0.0060
## 9 1.2151 nan 0.1000 0.0046
## 10 1.2049 nan 0.1000 0.0038
## 20 1.1321 nan 0.1000 0.0014
## 40 1.0420 nan 0.1000 0.0008
## 60 0.9930 nan 0.1000 0.0002
## 80 0.9467 nan 0.1000 0.0007
## 100 0.9073 nan 0.1000 0.0002
## 120 0.8705 nan 0.1000 0.0001
## 140 0.8363 nan 0.1000 0.0010
## 160 0.8116 nan 0.1000 -0.0002
## 180 0.7886 nan 0.1000 0.0005
## 200 0.7636 nan 0.1000 0.0003
## 220 0.7464 nan 0.1000 0.0000
## 240 0.7284 nan 0.1000 -0.0001
## 260 0.7138 nan 0.1000 -0.0001
## 280 0.6966 nan 0.1000 -0.0001
## 300 0.6797 nan 0.1000 0.0005
## 320 0.6653 nan 0.1000 -0.0000
## 340 0.6520 nan 0.1000 -0.0000
## 360 0.6379 nan 0.1000 0.0004
## 380 0.6236 nan 0.1000 -0.0005
## 400 0.6124 nan 0.1000 -0.0002
## 420 0.5993 nan 0.1000 -0.0004
## 440 0.5878 nan 0.1000 -0.0003
## 460 0.5786 nan 0.1000 -0.0007
## 480 0.5682 nan 0.1000 -0.0001
## 500 0.5599 nan 0.1000 -0.0001
##
## - Fold04: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3338 nan 0.1000 0.0148
## 2 1.3084 nan 0.1000 0.0113
## 3 1.2868 nan 0.1000 0.0093
## 4 1.2665 nan 0.1000 0.0093
## 5 1.2483 nan 0.1000 0.0075
## 6 1.2338 nan 0.1000 0.0052
## 7 1.2207 nan 0.1000 0.0055
## 8 1.2071 nan 0.1000 0.0051
## 9 1.1942 nan 0.1000 0.0053
## 10 1.1820 nan 0.1000 0.0053
## 20 1.1011 nan 0.1000 0.0032
## 40 0.9980 nan 0.1000 0.0019
## 60 0.9385 nan 0.1000 0.0004
## 80 0.8841 nan 0.1000 0.0005
## 100 0.8384 nan 0.1000 -0.0003
## 120 0.8022 nan 0.1000 -0.0004
## 140 0.7728 nan 0.1000 -0.0005
## 160 0.7458 nan 0.1000 -0.0002
## 180 0.7187 nan 0.1000 -0.0004
## 200 0.6954 nan 0.1000 -0.0004
## 220 0.6751 nan 0.1000 -0.0004
## 240 0.6554 nan 0.1000 0.0000
## 260 0.6378 nan 0.1000 -0.0004
## 280 0.6176 nan 0.1000 0.0004
## 300 0.5982 nan 0.1000 -0.0001
## 320 0.5807 nan 0.1000 -0.0002
## 340 0.5658 nan 0.1000 -0.0004
## 360 0.5530 nan 0.1000 -0.0007
## 380 0.5399 nan 0.1000 -0.0004
## 400 0.5255 nan 0.1000 -0.0005
## 420 0.5130 nan 0.1000 -0.0004
## 440 0.4999 nan 0.1000 0.0001
## 460 0.4879 nan 0.1000 -0.0002
## 480 0.4780 nan 0.1000 -0.0004
## 500 0.4656 nan 0.1000 -0.0003
##
## - Fold04: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3338 nan 0.1000 0.0145
## 2 1.3070 nan 0.1000 0.0109
## 3 1.2880 nan 0.1000 0.0083
## 4 1.2686 nan 0.1000 0.0092
## 5 1.2478 nan 0.1000 0.0089
## 6 1.2305 nan 0.1000 0.0074
## 7 1.2137 nan 0.1000 0.0075
## 8 1.1969 nan 0.1000 0.0074
## 9 1.1825 nan 0.1000 0.0061
## 10 1.1703 nan 0.1000 0.0053
## 20 1.0871 nan 0.1000 0.0007
## 40 0.9784 nan 0.1000 0.0008
## 60 0.9051 nan 0.1000 0.0004
## 80 0.8399 nan 0.1000 0.0019
## 100 0.7896 nan 0.1000 0.0000
## 120 0.7488 nan 0.1000 -0.0002
## 140 0.7153 nan 0.1000 0.0006
## 160 0.6827 nan 0.1000 -0.0001
## 180 0.6560 nan 0.1000 -0.0003
## 200 0.6276 nan 0.1000 0.0000
## 220 0.6033 nan 0.1000 -0.0003
## 240 0.5817 nan 0.1000 -0.0001
## 260 0.5609 nan 0.1000 -0.0001
## 280 0.5402 nan 0.1000 -0.0005
## 300 0.5236 nan 0.1000 -0.0001
## 320 0.5062 nan 0.1000 -0.0002
## 340 0.4888 nan 0.1000 -0.0003
## 360 0.4710 nan 0.1000 -0.0001
## 380 0.4574 nan 0.1000 0.0001
## 400 0.4439 nan 0.1000 -0.0003
## 420 0.4297 nan 0.1000 -0.0005
## 440 0.4167 nan 0.1000 -0.0004
## 460 0.4042 nan 0.1000 -0.0001
## 480 0.3930 nan 0.1000 -0.0003
## 500 0.3816 nan 0.1000 -0.0001
##
## - Fold04: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3331 nan 0.1000 0.0149
## 2 1.3059 nan 0.1000 0.0127
## 3 1.2784 nan 0.1000 0.0133
## 4 1.2576 nan 0.1000 0.0079
## 5 1.2404 nan 0.1000 0.0070
## 6 1.2254 nan 0.1000 0.0065
## 7 1.2090 nan 0.1000 0.0072
## 8 1.1936 nan 0.1000 0.0063
## 9 1.1780 nan 0.1000 0.0066
## 10 1.1659 nan 0.1000 0.0040
## 20 1.0624 nan 0.1000 0.0019
## 40 0.9394 nan 0.1000 0.0017
## 60 0.8549 nan 0.1000 0.0010
## 80 0.7948 nan 0.1000 0.0003
## 100 0.7499 nan 0.1000 0.0002
## 120 0.7060 nan 0.1000 -0.0003
## 140 0.6674 nan 0.1000 0.0004
## 160 0.6357 nan 0.1000 0.0002
## 180 0.6055 nan 0.1000 -0.0001
## 200 0.5757 nan 0.1000 -0.0002
## 220 0.5495 nan 0.1000 -0.0001
## 240 0.5251 nan 0.1000 -0.0000
## 260 0.5030 nan 0.1000 -0.0003
## 280 0.4814 nan 0.1000 -0.0001
## 300 0.4616 nan 0.1000 -0.0001
## 320 0.4432 nan 0.1000 -0.0003
## 340 0.4251 nan 0.1000 -0.0001
## 360 0.4096 nan 0.1000 -0.0000
## 380 0.3937 nan 0.1000 -0.0001
## 400 0.3809 nan 0.1000 -0.0005
## 420 0.3684 nan 0.1000 -0.0006
## 440 0.3556 nan 0.1000 -0.0003
## 460 0.3424 nan 0.1000 -0.0003
## 480 0.3310 nan 0.1000 -0.0002
## 500 0.3195 nan 0.1000 -0.0002
##
## - Fold04: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3326 nan 0.1000 0.0153
## 2 1.3017 nan 0.1000 0.0132
## 3 1.2757 nan 0.1000 0.0120
## 4 1.2506 nan 0.1000 0.0113
## 5 1.2304 nan 0.1000 0.0087
## 6 1.2097 nan 0.1000 0.0084
## 7 1.1915 nan 0.1000 0.0071
## 8 1.1756 nan 0.1000 0.0066
## 9 1.1631 nan 0.1000 0.0047
## 10 1.1455 nan 0.1000 0.0073
## 20 1.0446 nan 0.1000 0.0023
## 40 0.8964 nan 0.1000 0.0009
## 60 0.8186 nan 0.1000 0.0004
## 80 0.7558 nan 0.1000 -0.0002
## 100 0.6981 nan 0.1000 0.0009
## 120 0.6547 nan 0.1000 -0.0002
## 140 0.6163 nan 0.1000 -0.0002
## 160 0.5833 nan 0.1000 -0.0007
## 180 0.5518 nan 0.1000 -0.0003
## 200 0.5243 nan 0.1000 -0.0001
## 220 0.4973 nan 0.1000 -0.0001
## 240 0.4730 nan 0.1000 -0.0002
## 260 0.4489 nan 0.1000 -0.0005
## 280 0.4280 nan 0.1000 -0.0005
## 300 0.4088 nan 0.1000 -0.0005
## 320 0.3895 nan 0.1000 -0.0004
## 340 0.3731 nan 0.1000 -0.0003
## 360 0.3579 nan 0.1000 -0.0005
## 380 0.3422 nan 0.1000 -0.0004
## 400 0.3278 nan 0.1000 -0.0005
## 420 0.3139 nan 0.1000 -0.0001
## 440 0.3021 nan 0.1000 -0.0001
## 460 0.2887 nan 0.1000 -0.0002
## 480 0.2770 nan 0.1000 -0.0004
## 500 0.2651 nan 0.1000 -0.0001
##
## - Fold04: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3254 nan 0.1000 0.0177
## 2 1.2954 nan 0.1000 0.0127
## 3 1.2699 nan 0.1000 0.0097
## 4 1.2453 nan 0.1000 0.0105
## 5 1.2212 nan 0.1000 0.0109
## 6 1.1991 nan 0.1000 0.0089
## 7 1.1799 nan 0.1000 0.0077
## 8 1.1632 nan 0.1000 0.0058
## 9 1.1465 nan 0.1000 0.0058
## 10 1.1300 nan 0.1000 0.0070
## 20 1.0264 nan 0.1000 0.0024
## 40 0.8851 nan 0.1000 0.0015
## 60 0.7988 nan 0.1000 0.0001
## 80 0.7314 nan 0.1000 -0.0002
## 100 0.6807 nan 0.1000 -0.0004
## 120 0.6275 nan 0.1000 0.0003
## 140 0.5874 nan 0.1000 -0.0004
## 160 0.5523 nan 0.1000 -0.0007
## 180 0.5185 nan 0.1000 -0.0001
## 200 0.4886 nan 0.1000 -0.0007
## 220 0.4596 nan 0.1000 -0.0006
## 240 0.4356 nan 0.1000 -0.0007
## 260 0.4120 nan 0.1000 -0.0002
## 280 0.3906 nan 0.1000 -0.0002
## 300 0.3712 nan 0.1000 -0.0004
## 320 0.3516 nan 0.1000 -0.0005
## 340 0.3345 nan 0.1000 -0.0005
## 360 0.3176 nan 0.1000 -0.0003
## 380 0.3012 nan 0.1000 -0.0002
## 400 0.2867 nan 0.1000 -0.0004
## 420 0.2723 nan 0.1000 -0.0002
## 440 0.2605 nan 0.1000 -0.0003
## 460 0.2470 nan 0.1000 -0.0003
## 480 0.2358 nan 0.1000 -0.0005
## 500 0.2251 nan 0.1000 0.0000
##
## - Fold04: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3264 nan 0.1000 0.0178
## 2 1.2878 nan 0.1000 0.0173
## 3 1.2607 nan 0.1000 0.0116
## 4 1.2309 nan 0.1000 0.0132
## 5 1.2058 nan 0.1000 0.0103
## 6 1.1797 nan 0.1000 0.0115
## 7 1.1593 nan 0.1000 0.0092
## 8 1.1411 nan 0.1000 0.0084
## 9 1.1254 nan 0.1000 0.0053
## 10 1.1123 nan 0.1000 0.0051
## 20 0.9944 nan 0.1000 0.0031
## 40 0.8422 nan 0.1000 0.0014
## 60 0.7512 nan 0.1000 -0.0003
## 80 0.6794 nan 0.1000 -0.0008
## 100 0.6207 nan 0.1000 -0.0002
## 120 0.5719 nan 0.1000 0.0002
## 140 0.5308 nan 0.1000 -0.0005
## 160 0.4939 nan 0.1000 -0.0003
## 180 0.4597 nan 0.1000 -0.0002
## 200 0.4292 nan 0.1000 -0.0005
## 220 0.4022 nan 0.1000 -0.0005
## 240 0.3779 nan 0.1000 -0.0003
## 260 0.3554 nan 0.1000 -0.0004
## 280 0.3335 nan 0.1000 -0.0003
## 300 0.3152 nan 0.1000 -0.0004
## 320 0.2963 nan 0.1000 -0.0006
## 340 0.2804 nan 0.1000 -0.0003
## 360 0.2661 nan 0.1000 -0.0002
## 380 0.2510 nan 0.1000 -0.0003
## 400 0.2377 nan 0.1000 -0.0004
## 420 0.2250 nan 0.1000 -0.0003
## 440 0.2126 nan 0.1000 -0.0003
## 460 0.2007 nan 0.1000 -0.0002
## 480 0.1898 nan 0.1000 -0.0001
## 500 0.1809 nan 0.1000 -0.0003
##
## - Fold04: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold04: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3225 nan 0.1000 0.0191
## 2 1.2905 nan 0.1000 0.0148
## 3 1.2581 nan 0.1000 0.0139
## 4 1.2247 nan 0.1000 0.0154
## 5 1.2030 nan 0.1000 0.0088
## 6 1.1855 nan 0.1000 0.0069
## 7 1.1646 nan 0.1000 0.0081
## 8 1.1461 nan 0.1000 0.0079
## 9 1.1299 nan 0.1000 0.0060
## 10 1.1098 nan 0.1000 0.0081
## 20 0.9882 nan 0.1000 0.0026
## 40 0.8360 nan 0.1000 0.0010
## 60 0.7388 nan 0.1000 0.0013
## 80 0.6700 nan 0.1000 -0.0007
## 100 0.6133 nan 0.1000 -0.0006
## 120 0.5623 nan 0.1000 -0.0003
## 140 0.5182 nan 0.1000 -0.0003
## 160 0.4748 nan 0.1000 -0.0002
## 180 0.4418 nan 0.1000 -0.0007
## 200 0.4108 nan 0.1000 -0.0006
## 220 0.3851 nan 0.1000 -0.0004
## 240 0.3574 nan 0.1000 -0.0002
## 260 0.3349 nan 0.1000 -0.0004
## 280 0.3138 nan 0.1000 -0.0002
## 300 0.2944 nan 0.1000 -0.0004
## 320 0.2769 nan 0.1000 -0.0005
## 340 0.2604 nan 0.1000 -0.0002
## 360 0.2450 nan 0.1000 -0.0003
## 380 0.2302 nan 0.1000 -0.0002
## 400 0.2175 nan 0.1000 -0.0001
## 420 0.2048 nan 0.1000 -0.0003
## 440 0.1930 nan 0.1000 -0.0001
## 460 0.1824 nan 0.1000 -0.0003
## 480 0.1714 nan 0.1000 -0.0003
## 500 0.1614 nan 0.1000 -0.0003
##
## - Fold04: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3503 nan 0.1000 0.0083
## 2 1.3354 nan 0.1000 0.0068
## 3 1.3230 nan 0.1000 0.0060
## 4 1.3116 nan 0.1000 0.0051
## 5 1.3011 nan 0.1000 0.0050
## 6 1.2928 nan 0.1000 0.0044
## 7 1.2855 nan 0.1000 0.0032
## 8 1.2791 nan 0.1000 0.0029
## 9 1.2723 nan 0.1000 0.0029
## 10 1.2661 nan 0.1000 0.0025
## 20 1.2261 nan 0.1000 0.0011
## 40 1.1798 nan 0.1000 0.0006
## 60 1.1502 nan 0.1000 0.0004
## 80 1.1306 nan 0.1000 0.0003
## 100 1.1146 nan 0.1000 0.0001
## 120 1.1010 nan 0.1000 -0.0000
## 140 1.0900 nan 0.1000 0.0000
## 160 1.0792 nan 0.1000 -0.0003
## 180 1.0708 nan 0.1000 -0.0007
## 200 1.0622 nan 0.1000 0.0001
## 220 1.0548 nan 0.1000 -0.0000
## 240 1.0473 nan 0.1000 -0.0001
## 260 1.0396 nan 0.1000 0.0001
## 280 1.0328 nan 0.1000 -0.0001
## 300 1.0276 nan 0.1000 -0.0003
## 320 1.0210 nan 0.1000 0.0000
## 340 1.0153 nan 0.1000 0.0000
## 360 1.0096 nan 0.1000 0.0000
## 380 1.0045 nan 0.1000 -0.0002
## 400 0.9996 nan 0.1000 -0.0002
## 420 0.9941 nan 0.1000 -0.0002
## 440 0.9889 nan 0.1000 0.0000
## 460 0.9844 nan 0.1000 -0.0001
## 480 0.9786 nan 0.1000 -0.0001
## 500 0.9736 nan 0.1000 -0.0003
##
## - Fold05: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3446 nan 0.1000 0.0092
## 2 1.3262 nan 0.1000 0.0089
## 3 1.3109 nan 0.1000 0.0071
## 4 1.2953 nan 0.1000 0.0071
## 5 1.2832 nan 0.1000 0.0052
## 6 1.2734 nan 0.1000 0.0043
## 7 1.2620 nan 0.1000 0.0052
## 8 1.2522 nan 0.1000 0.0045
## 9 1.2442 nan 0.1000 0.0038
## 10 1.2365 nan 0.1000 0.0037
## 20 1.1709 nan 0.1000 0.0013
## 40 1.0981 nan 0.1000 0.0010
## 60 1.0514 nan 0.1000 0.0010
## 80 1.0168 nan 0.1000 -0.0000
## 100 0.9819 nan 0.1000 -0.0003
## 120 0.9522 nan 0.1000 0.0005
## 140 0.9309 nan 0.1000 0.0003
## 160 0.9052 nan 0.1000 0.0006
## 180 0.8858 nan 0.1000 0.0003
## 200 0.8682 nan 0.1000 0.0003
## 220 0.8547 nan 0.1000 -0.0001
## 240 0.8344 nan 0.1000 0.0001
## 260 0.8233 nan 0.1000 -0.0003
## 280 0.8057 nan 0.1000 -0.0004
## 300 0.7942 nan 0.1000 -0.0002
## 320 0.7842 nan 0.1000 -0.0002
## 340 0.7760 nan 0.1000 -0.0000
## 360 0.7659 nan 0.1000 -0.0007
## 380 0.7545 nan 0.1000 -0.0003
## 400 0.7442 nan 0.1000 -0.0004
## 420 0.7366 nan 0.1000 -0.0003
## 440 0.7285 nan 0.1000 -0.0003
## 460 0.7193 nan 0.1000 -0.0002
## 480 0.7114 nan 0.1000 0.0001
## 500 0.7011 nan 0.1000 0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3428 nan 0.1000 0.0102
## 2 1.3228 nan 0.1000 0.0096
## 3 1.3075 nan 0.1000 0.0074
## 4 1.2909 nan 0.1000 0.0074
## 5 1.2766 nan 0.1000 0.0067
## 6 1.2629 nan 0.1000 0.0063
## 7 1.2517 nan 0.1000 0.0050
## 8 1.2417 nan 0.1000 0.0042
## 9 1.2307 nan 0.1000 0.0045
## 10 1.2177 nan 0.1000 0.0055
## 20 1.1475 nan 0.1000 0.0019
## 40 1.0568 nan 0.1000 0.0003
## 60 0.9978 nan 0.1000 0.0001
## 80 0.9494 nan 0.1000 0.0004
## 100 0.9083 nan 0.1000 -0.0005
## 120 0.8775 nan 0.1000 -0.0001
## 140 0.8487 nan 0.1000 -0.0001
## 160 0.8227 nan 0.1000 -0.0001
## 180 0.8001 nan 0.1000 -0.0001
## 200 0.7787 nan 0.1000 0.0001
## 220 0.7566 nan 0.1000 -0.0002
## 240 0.7384 nan 0.1000 -0.0002
## 260 0.7202 nan 0.1000 -0.0000
## 280 0.7063 nan 0.1000 -0.0002
## 300 0.6913 nan 0.1000 -0.0003
## 320 0.6765 nan 0.1000 0.0001
## 340 0.6620 nan 0.1000 0.0002
## 360 0.6482 nan 0.1000 -0.0002
## 380 0.6369 nan 0.1000 -0.0002
## 400 0.6245 nan 0.1000 0.0001
## 420 0.6125 nan 0.1000 0.0000
## 440 0.6010 nan 0.1000 -0.0003
## 460 0.5892 nan 0.1000 -0.0002
## 480 0.5786 nan 0.1000 -0.0000
## 500 0.5685 nan 0.1000 -0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3407 nan 0.1000 0.0131
## 2 1.3161 nan 0.1000 0.0116
## 3 1.2919 nan 0.1000 0.0109
## 4 1.2730 nan 0.1000 0.0086
## 5 1.2541 nan 0.1000 0.0092
## 6 1.2374 nan 0.1000 0.0074
## 7 1.2236 nan 0.1000 0.0048
## 8 1.2093 nan 0.1000 0.0058
## 9 1.1978 nan 0.1000 0.0054
## 10 1.1883 nan 0.1000 0.0038
## 20 1.1057 nan 0.1000 0.0016
## 40 1.0071 nan 0.1000 0.0018
## 60 0.9423 nan 0.1000 -0.0003
## 80 0.8948 nan 0.1000 0.0007
## 100 0.8462 nan 0.1000 0.0004
## 120 0.8004 nan 0.1000 0.0001
## 140 0.7662 nan 0.1000 -0.0001
## 160 0.7383 nan 0.1000 0.0004
## 180 0.7133 nan 0.1000 -0.0001
## 200 0.6915 nan 0.1000 -0.0003
## 220 0.6687 nan 0.1000 -0.0003
## 240 0.6467 nan 0.1000 -0.0003
## 260 0.6273 nan 0.1000 -0.0001
## 280 0.6119 nan 0.1000 -0.0003
## 300 0.5967 nan 0.1000 0.0001
## 320 0.5805 nan 0.1000 -0.0004
## 340 0.5647 nan 0.1000 0.0002
## 360 0.5508 nan 0.1000 -0.0003
## 380 0.5365 nan 0.1000 -0.0006
## 400 0.5222 nan 0.1000 -0.0003
## 420 0.5093 nan 0.1000 -0.0001
## 440 0.4975 nan 0.1000 -0.0002
## 460 0.4851 nan 0.1000 -0.0003
## 480 0.4745 nan 0.1000 -0.0003
## 500 0.4648 nan 0.1000 -0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3319 nan 0.1000 0.0161
## 2 1.3043 nan 0.1000 0.0128
## 3 1.2800 nan 0.1000 0.0110
## 4 1.2591 nan 0.1000 0.0079
## 5 1.2381 nan 0.1000 0.0100
## 6 1.2170 nan 0.1000 0.0092
## 7 1.2011 nan 0.1000 0.0065
## 8 1.1847 nan 0.1000 0.0064
## 9 1.1723 nan 0.1000 0.0045
## 10 1.1598 nan 0.1000 0.0038
## 20 1.0603 nan 0.1000 0.0046
## 40 0.9540 nan 0.1000 0.0002
## 60 0.8826 nan 0.1000 0.0006
## 80 0.8264 nan 0.1000 0.0005
## 100 0.7751 nan 0.1000 0.0003
## 120 0.7369 nan 0.1000 -0.0001
## 140 0.7024 nan 0.1000 0.0000
## 160 0.6710 nan 0.1000 0.0002
## 180 0.6430 nan 0.1000 0.0003
## 200 0.6136 nan 0.1000 -0.0003
## 220 0.5903 nan 0.1000 -0.0004
## 240 0.5665 nan 0.1000 -0.0002
## 260 0.5480 nan 0.1000 -0.0004
## 280 0.5292 nan 0.1000 -0.0005
## 300 0.5121 nan 0.1000 -0.0004
## 320 0.4966 nan 0.1000 -0.0006
## 340 0.4816 nan 0.1000 -0.0005
## 360 0.4674 nan 0.1000 -0.0002
## 380 0.4533 nan 0.1000 -0.0002
## 400 0.4395 nan 0.1000 -0.0002
## 420 0.4253 nan 0.1000 -0.0002
## 440 0.4140 nan 0.1000 -0.0004
## 460 0.4012 nan 0.1000 -0.0005
## 480 0.3916 nan 0.1000 -0.0004
## 500 0.3806 nan 0.1000 -0.0003
##
## - Fold05: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3325 nan 0.1000 0.0161
## 2 1.3059 nan 0.1000 0.0111
## 3 1.2797 nan 0.1000 0.0120
## 4 1.2569 nan 0.1000 0.0084
## 5 1.2360 nan 0.1000 0.0083
## 6 1.2166 nan 0.1000 0.0087
## 7 1.2022 nan 0.1000 0.0056
## 8 1.1858 nan 0.1000 0.0066
## 9 1.1703 nan 0.1000 0.0056
## 10 1.1535 nan 0.1000 0.0078
## 20 1.0553 nan 0.1000 0.0032
## 40 0.9450 nan 0.1000 0.0002
## 60 0.8645 nan 0.1000 0.0005
## 80 0.7994 nan 0.1000 -0.0003
## 100 0.7488 nan 0.1000 0.0005
## 120 0.7042 nan 0.1000 0.0002
## 140 0.6686 nan 0.1000 -0.0001
## 160 0.6325 nan 0.1000 0.0003
## 180 0.6002 nan 0.1000 0.0002
## 200 0.5726 nan 0.1000 -0.0002
## 220 0.5463 nan 0.1000 0.0002
## 240 0.5227 nan 0.1000 -0.0003
## 260 0.4989 nan 0.1000 -0.0003
## 280 0.4790 nan 0.1000 -0.0006
## 300 0.4583 nan 0.1000 -0.0006
## 320 0.4405 nan 0.1000 -0.0000
## 340 0.4237 nan 0.1000 -0.0001
## 360 0.4059 nan 0.1000 -0.0003
## 380 0.3916 nan 0.1000 -0.0002
## 400 0.3769 nan 0.1000 -0.0007
## 420 0.3644 nan 0.1000 -0.0001
## 440 0.3507 nan 0.1000 -0.0002
## 460 0.3368 nan 0.1000 -0.0001
## 480 0.3246 nan 0.1000 -0.0000
## 500 0.3132 nan 0.1000 -0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3332 nan 0.1000 0.0146
## 2 1.3069 nan 0.1000 0.0108
## 3 1.2777 nan 0.1000 0.0120
## 4 1.2542 nan 0.1000 0.0102
## 5 1.2341 nan 0.1000 0.0090
## 6 1.2131 nan 0.1000 0.0085
## 7 1.1951 nan 0.1000 0.0067
## 8 1.1773 nan 0.1000 0.0066
## 9 1.1620 nan 0.1000 0.0061
## 10 1.1491 nan 0.1000 0.0053
## 20 1.0475 nan 0.1000 0.0021
## 40 0.9151 nan 0.1000 0.0006
## 60 0.8376 nan 0.1000 -0.0001
## 80 0.7658 nan 0.1000 0.0015
## 100 0.7130 nan 0.1000 -0.0003
## 120 0.6672 nan 0.1000 0.0005
## 140 0.6291 nan 0.1000 -0.0003
## 160 0.5909 nan 0.1000 0.0004
## 180 0.5621 nan 0.1000 -0.0001
## 200 0.5323 nan 0.1000 -0.0004
## 220 0.5074 nan 0.1000 -0.0004
## 240 0.4829 nan 0.1000 -0.0004
## 260 0.4598 nan 0.1000 -0.0002
## 280 0.4390 nan 0.1000 -0.0007
## 300 0.4184 nan 0.1000 -0.0003
## 320 0.3985 nan 0.1000 -0.0006
## 340 0.3817 nan 0.1000 -0.0003
## 360 0.3649 nan 0.1000 -0.0003
## 380 0.3498 nan 0.1000 -0.0003
## 400 0.3356 nan 0.1000 -0.0004
## 420 0.3187 nan 0.1000 -0.0002
## 440 0.3054 nan 0.1000 -0.0002
## 460 0.2924 nan 0.1000 -0.0002
## 480 0.2809 nan 0.1000 -0.0001
## 500 0.2684 nan 0.1000 -0.0003
##
## - Fold05: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3282 nan 0.1000 0.0175
## 2 1.2945 nan 0.1000 0.0146
## 3 1.2638 nan 0.1000 0.0130
## 4 1.2379 nan 0.1000 0.0102
## 5 1.2195 nan 0.1000 0.0072
## 6 1.1989 nan 0.1000 0.0083
## 7 1.1821 nan 0.1000 0.0064
## 8 1.1662 nan 0.1000 0.0063
## 9 1.1507 nan 0.1000 0.0063
## 10 1.1339 nan 0.1000 0.0071
## 20 1.0221 nan 0.1000 0.0033
## 40 0.8948 nan 0.1000 0.0004
## 60 0.8054 nan 0.1000 0.0008
## 80 0.7364 nan 0.1000 0.0001
## 100 0.6814 nan 0.1000 -0.0001
## 120 0.6323 nan 0.1000 -0.0002
## 140 0.5919 nan 0.1000 -0.0003
## 160 0.5563 nan 0.1000 -0.0002
## 180 0.5210 nan 0.1000 -0.0002
## 200 0.4924 nan 0.1000 0.0000
## 220 0.4656 nan 0.1000 -0.0002
## 240 0.4413 nan 0.1000 0.0003
## 260 0.4169 nan 0.1000 0.0001
## 280 0.3943 nan 0.1000 -0.0002
## 300 0.3741 nan 0.1000 -0.0001
## 320 0.3550 nan 0.1000 -0.0003
## 340 0.3368 nan 0.1000 -0.0002
## 360 0.3205 nan 0.1000 -0.0005
## 380 0.3061 nan 0.1000 -0.0004
## 400 0.2902 nan 0.1000 -0.0003
## 420 0.2757 nan 0.1000 -0.0001
## 440 0.2626 nan 0.1000 -0.0002
## 460 0.2511 nan 0.1000 -0.0001
## 480 0.2403 nan 0.1000 -0.0001
## 500 0.2298 nan 0.1000 -0.0003
##
## - Fold05: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3262 nan 0.1000 0.0172
## 2 1.2953 nan 0.1000 0.0141
## 3 1.2659 nan 0.1000 0.0138
## 4 1.2423 nan 0.1000 0.0083
## 5 1.2174 nan 0.1000 0.0104
## 6 1.1949 nan 0.1000 0.0099
## 7 1.1756 nan 0.1000 0.0072
## 8 1.1530 nan 0.1000 0.0101
## 9 1.1361 nan 0.1000 0.0064
## 10 1.1183 nan 0.1000 0.0071
## 20 0.9980 nan 0.1000 0.0018
## 40 0.8437 nan 0.1000 0.0014
## 60 0.7526 nan 0.1000 0.0010
## 80 0.6904 nan 0.1000 0.0002
## 100 0.6278 nan 0.1000 0.0002
## 120 0.5816 nan 0.1000 0.0005
## 140 0.5357 nan 0.1000 -0.0001
## 160 0.4969 nan 0.1000 -0.0007
## 180 0.4636 nan 0.1000 0.0003
## 200 0.4349 nan 0.1000 0.0000
## 220 0.4066 nan 0.1000 -0.0001
## 240 0.3821 nan 0.1000 -0.0003
## 260 0.3591 nan 0.1000 -0.0002
## 280 0.3381 nan 0.1000 -0.0003
## 300 0.3177 nan 0.1000 -0.0003
## 320 0.3000 nan 0.1000 -0.0002
## 340 0.2823 nan 0.1000 -0.0001
## 360 0.2658 nan 0.1000 -0.0002
## 380 0.2505 nan 0.1000 -0.0004
## 400 0.2376 nan 0.1000 -0.0002
## 420 0.2264 nan 0.1000 -0.0003
## 440 0.2152 nan 0.1000 -0.0004
## 460 0.2021 nan 0.1000 -0.0003
## 480 0.1916 nan 0.1000 -0.0003
## 500 0.1814 nan 0.1000 -0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold05: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3251 nan 0.1000 0.0180
## 2 1.2953 nan 0.1000 0.0133
## 3 1.2692 nan 0.1000 0.0099
## 4 1.2423 nan 0.1000 0.0114
## 5 1.2192 nan 0.1000 0.0104
## 6 1.2006 nan 0.1000 0.0076
## 7 1.1783 nan 0.1000 0.0094
## 8 1.1561 nan 0.1000 0.0104
## 9 1.1384 nan 0.1000 0.0063
## 10 1.1229 nan 0.1000 0.0053
## 20 0.9929 nan 0.1000 0.0010
## 40 0.8426 nan 0.1000 0.0009
## 60 0.7487 nan 0.1000 0.0005
## 80 0.6762 nan 0.1000 0.0004
## 100 0.6136 nan 0.1000 -0.0003
## 120 0.5599 nan 0.1000 0.0002
## 140 0.5157 nan 0.1000 0.0000
## 160 0.4791 nan 0.1000 -0.0000
## 180 0.4443 nan 0.1000 -0.0001
## 200 0.4093 nan 0.1000 -0.0004
## 220 0.3807 nan 0.1000 -0.0002
## 240 0.3566 nan 0.1000 -0.0005
## 260 0.3322 nan 0.1000 -0.0001
## 280 0.3101 nan 0.1000 -0.0003
## 300 0.2902 nan 0.1000 -0.0002
## 320 0.2726 nan 0.1000 -0.0005
## 340 0.2565 nan 0.1000 -0.0002
## 360 0.2403 nan 0.1000 -0.0001
## 380 0.2249 nan 0.1000 -0.0001
## 400 0.2120 nan 0.1000 -0.0001
## 420 0.1994 nan 0.1000 -0.0001
## 440 0.1883 nan 0.1000 -0.0002
## 460 0.1770 nan 0.1000 -0.0004
## 480 0.1663 nan 0.1000 -0.0002
## 500 0.1573 nan 0.1000 -0.0002
##
## - Fold05: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3500 nan 0.1000 0.0081
## 2 1.3354 nan 0.1000 0.0070
## 3 1.3224 nan 0.1000 0.0057
## 4 1.3104 nan 0.1000 0.0050
## 5 1.3005 nan 0.1000 0.0050
## 6 1.2919 nan 0.1000 0.0040
## 7 1.2847 nan 0.1000 0.0034
## 8 1.2763 nan 0.1000 0.0029
## 9 1.2713 nan 0.1000 0.0025
## 10 1.2652 nan 0.1000 0.0023
## 20 1.2229 nan 0.1000 0.0011
## 40 1.1783 nan 0.1000 0.0003
## 60 1.1489 nan 0.1000 -0.0001
## 80 1.1279 nan 0.1000 0.0001
## 100 1.1125 nan 0.1000 -0.0003
## 120 1.0987 nan 0.1000 -0.0000
## 140 1.0891 nan 0.1000 -0.0002
## 160 1.0786 nan 0.1000 -0.0002
## 180 1.0695 nan 0.1000 -0.0001
## 200 1.0615 nan 0.1000 0.0001
## 220 1.0533 nan 0.1000 0.0001
## 240 1.0449 nan 0.1000 -0.0001
## 260 1.0385 nan 0.1000 0.0000
## 280 1.0317 nan 0.1000 -0.0001
## 300 1.0252 nan 0.1000 -0.0004
## 320 1.0191 nan 0.1000 -0.0003
## 340 1.0130 nan 0.1000 -0.0004
## 360 1.0074 nan 0.1000 -0.0001
## 380 1.0020 nan 0.1000 -0.0002
## 400 0.9967 nan 0.1000 -0.0002
## 420 0.9909 nan 0.1000 0.0000
## 440 0.9859 nan 0.1000 -0.0000
## 460 0.9815 nan 0.1000 -0.0002
## 480 0.9763 nan 0.1000 -0.0002
## 500 0.9720 nan 0.1000 -0.0006
##
## - Fold06: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3400 nan 0.1000 0.0115
## 2 1.3206 nan 0.1000 0.0092
## 3 1.3037 nan 0.1000 0.0075
## 4 1.2891 nan 0.1000 0.0065
## 5 1.2784 nan 0.1000 0.0050
## 6 1.2666 nan 0.1000 0.0053
## 7 1.2567 nan 0.1000 0.0047
## 8 1.2465 nan 0.1000 0.0045
## 9 1.2387 nan 0.1000 0.0037
## 10 1.2301 nan 0.1000 0.0037
## 20 1.1706 nan 0.1000 0.0009
## 40 1.0997 nan 0.1000 0.0004
## 60 1.0483 nan 0.1000 -0.0000
## 80 1.0131 nan 0.1000 0.0000
## 100 0.9787 nan 0.1000 0.0012
## 120 0.9535 nan 0.1000 0.0001
## 140 0.9264 nan 0.1000 0.0005
## 160 0.9061 nan 0.1000 -0.0000
## 180 0.8879 nan 0.1000 -0.0002
## 200 0.8690 nan 0.1000 0.0013
## 220 0.8517 nan 0.1000 -0.0001
## 240 0.8347 nan 0.1000 -0.0005
## 260 0.8226 nan 0.1000 -0.0000
## 280 0.8106 nan 0.1000 -0.0002
## 300 0.7994 nan 0.1000 -0.0001
## 320 0.7855 nan 0.1000 -0.0001
## 340 0.7740 nan 0.1000 0.0002
## 360 0.7619 nan 0.1000 -0.0002
## 380 0.7533 nan 0.1000 0.0001
## 400 0.7418 nan 0.1000 -0.0002
## 420 0.7304 nan 0.1000 -0.0003
## 440 0.7213 nan 0.1000 -0.0002
## 460 0.7099 nan 0.1000 -0.0004
## 480 0.7005 nan 0.1000 -0.0003
## 500 0.6945 nan 0.1000 -0.0002
##
## - Fold06: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3422 nan 0.1000 0.0108
## 2 1.3232 nan 0.1000 0.0094
## 3 1.3077 nan 0.1000 0.0061
## 4 1.2932 nan 0.1000 0.0062
## 5 1.2778 nan 0.1000 0.0069
## 6 1.2643 nan 0.1000 0.0052
## 7 1.2524 nan 0.1000 0.0054
## 8 1.2424 nan 0.1000 0.0033
## 9 1.2336 nan 0.1000 0.0042
## 10 1.2237 nan 0.1000 0.0036
## 20 1.1577 nan 0.1000 0.0040
## 40 1.0700 nan 0.1000 0.0012
## 60 1.0158 nan 0.1000 0.0007
## 80 0.9744 nan 0.1000 0.0018
## 100 0.9348 nan 0.1000 0.0003
## 120 0.9040 nan 0.1000 -0.0002
## 140 0.8689 nan 0.1000 -0.0000
## 160 0.8431 nan 0.1000 -0.0008
## 180 0.8210 nan 0.1000 0.0003
## 200 0.8011 nan 0.1000 -0.0003
## 220 0.7813 nan 0.1000 0.0001
## 240 0.7607 nan 0.1000 -0.0004
## 260 0.7432 nan 0.1000 0.0001
## 280 0.7248 nan 0.1000 -0.0007
## 300 0.7086 nan 0.1000 -0.0001
## 320 0.6949 nan 0.1000 -0.0002
## 340 0.6823 nan 0.1000 -0.0004
## 360 0.6660 nan 0.1000 -0.0004
## 380 0.6552 nan 0.1000 -0.0008
## 400 0.6437 nan 0.1000 -0.0002
## 420 0.6321 nan 0.1000 -0.0003
## 440 0.6216 nan 0.1000 -0.0004
## 460 0.6102 nan 0.1000 -0.0003
## 480 0.5996 nan 0.1000 -0.0005
## 500 0.5902 nan 0.1000 -0.0003
##
## - Fold06: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3422 nan 0.1000 0.0113
## 2 1.3179 nan 0.1000 0.0099
## 3 1.2998 nan 0.1000 0.0083
## 4 1.2828 nan 0.1000 0.0068
## 5 1.2671 nan 0.1000 0.0067
## 6 1.2526 nan 0.1000 0.0058
## 7 1.2373 nan 0.1000 0.0075
## 8 1.2268 nan 0.1000 0.0041
## 9 1.2187 nan 0.1000 0.0024
## 10 1.2085 nan 0.1000 0.0043
## 20 1.1261 nan 0.1000 0.0012
## 40 1.0238 nan 0.1000 0.0017
## 60 0.9592 nan 0.1000 0.0013
## 80 0.9131 nan 0.1000 0.0003
## 100 0.8691 nan 0.1000 0.0005
## 120 0.8297 nan 0.1000 -0.0001
## 140 0.7967 nan 0.1000 -0.0003
## 160 0.7606 nan 0.1000 0.0004
## 180 0.7318 nan 0.1000 0.0003
## 200 0.7083 nan 0.1000 -0.0003
## 220 0.6873 nan 0.1000 -0.0002
## 240 0.6656 nan 0.1000 0.0001
## 260 0.6456 nan 0.1000 -0.0003
## 280 0.6274 nan 0.1000 0.0000
## 300 0.6087 nan 0.1000 -0.0004
## 320 0.5925 nan 0.1000 -0.0001
## 340 0.5766 nan 0.1000 -0.0001
## 360 0.5596 nan 0.1000 0.0002
## 380 0.5445 nan 0.1000 -0.0002
## 400 0.5301 nan 0.1000 -0.0004
## 420 0.5174 nan 0.1000 -0.0002
## 440 0.5060 nan 0.1000 -0.0003
## 460 0.4929 nan 0.1000 -0.0004
## 480 0.4817 nan 0.1000 -0.0002
## 500 0.4700 nan 0.1000 -0.0004
##
## - Fold06: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3356 nan 0.1000 0.0149
## 2 1.3109 nan 0.1000 0.0106
## 3 1.2887 nan 0.1000 0.0108
## 4 1.2681 nan 0.1000 0.0087
## 5 1.2533 nan 0.1000 0.0063
## 6 1.2345 nan 0.1000 0.0093
## 7 1.2190 nan 0.1000 0.0069
## 8 1.2072 nan 0.1000 0.0046
## 9 1.1923 nan 0.1000 0.0068
## 10 1.1813 nan 0.1000 0.0045
## 20 1.0919 nan 0.1000 0.0018
## 40 0.9871 nan 0.1000 0.0007
## 60 0.9174 nan 0.1000 0.0002
## 80 0.8541 nan 0.1000 0.0011
## 100 0.7995 nan 0.1000 0.0006
## 120 0.7570 nan 0.1000 -0.0005
## 140 0.7242 nan 0.1000 -0.0002
## 160 0.6885 nan 0.1000 -0.0000
## 180 0.6631 nan 0.1000 -0.0003
## 200 0.6363 nan 0.1000 -0.0002
## 220 0.6116 nan 0.1000 0.0002
## 240 0.5890 nan 0.1000 -0.0001
## 260 0.5681 nan 0.1000 -0.0002
## 280 0.5499 nan 0.1000 -0.0000
## 300 0.5327 nan 0.1000 -0.0005
## 320 0.5169 nan 0.1000 -0.0001
## 340 0.4990 nan 0.1000 -0.0002
## 360 0.4833 nan 0.1000 0.0001
## 380 0.4691 nan 0.1000 -0.0002
## 400 0.4542 nan 0.1000 -0.0000
## 420 0.4407 nan 0.1000 -0.0002
## 440 0.4268 nan 0.1000 -0.0005
## 460 0.4142 nan 0.1000 -0.0005
## 480 0.4023 nan 0.1000 -0.0003
## 500 0.3912 nan 0.1000 -0.0005
##
## - Fold06: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3386 nan 0.1000 0.0124
## 2 1.3142 nan 0.1000 0.0111
## 3 1.2910 nan 0.1000 0.0100
## 4 1.2664 nan 0.1000 0.0113
## 5 1.2469 nan 0.1000 0.0073
## 6 1.2287 nan 0.1000 0.0071
## 7 1.2165 nan 0.1000 0.0046
## 8 1.2034 nan 0.1000 0.0045
## 9 1.1882 nan 0.1000 0.0064
## 10 1.1756 nan 0.1000 0.0057
## 20 1.0776 nan 0.1000 0.0034
## 40 0.9576 nan 0.1000 0.0011
## 60 0.8769 nan 0.1000 0.0003
## 80 0.8187 nan 0.1000 -0.0002
## 100 0.7630 nan 0.1000 -0.0005
## 120 0.7148 nan 0.1000 -0.0002
## 140 0.6770 nan 0.1000 0.0000
## 160 0.6484 nan 0.1000 -0.0002
## 180 0.6163 nan 0.1000 -0.0001
## 200 0.5871 nan 0.1000 -0.0001
## 220 0.5635 nan 0.1000 -0.0005
## 240 0.5402 nan 0.1000 -0.0004
## 260 0.5171 nan 0.1000 -0.0003
## 280 0.4970 nan 0.1000 0.0002
## 300 0.4762 nan 0.1000 -0.0003
## 320 0.4579 nan 0.1000 -0.0002
## 340 0.4423 nan 0.1000 -0.0003
## 360 0.4247 nan 0.1000 -0.0002
## 380 0.4080 nan 0.1000 -0.0004
## 400 0.3934 nan 0.1000 -0.0004
## 420 0.3791 nan 0.1000 -0.0002
## 440 0.3657 nan 0.1000 -0.0004
## 460 0.3533 nan 0.1000 -0.0002
## 480 0.3422 nan 0.1000 -0.0004
## 500 0.3304 nan 0.1000 -0.0002
##
## - Fold06: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3290 nan 0.1000 0.0172
## 2 1.2975 nan 0.1000 0.0135
## 3 1.2753 nan 0.1000 0.0092
## 4 1.2560 nan 0.1000 0.0081
## 5 1.2348 nan 0.1000 0.0088
## 6 1.2148 nan 0.1000 0.0092
## 7 1.1972 nan 0.1000 0.0075
## 8 1.1830 nan 0.1000 0.0053
## 9 1.1705 nan 0.1000 0.0045
## 10 1.1560 nan 0.1000 0.0062
## 20 1.0547 nan 0.1000 0.0023
## 40 0.9273 nan 0.1000 0.0000
## 60 0.8439 nan 0.1000 -0.0006
## 80 0.7771 nan 0.1000 0.0002
## 100 0.7259 nan 0.1000 0.0001
## 120 0.6824 nan 0.1000 -0.0003
## 140 0.6431 nan 0.1000 -0.0000
## 160 0.6109 nan 0.1000 0.0000
## 180 0.5778 nan 0.1000 -0.0002
## 200 0.5486 nan 0.1000 -0.0002
## 220 0.5205 nan 0.1000 -0.0006
## 240 0.4957 nan 0.1000 -0.0002
## 260 0.4719 nan 0.1000 0.0000
## 280 0.4491 nan 0.1000 -0.0002
## 300 0.4300 nan 0.1000 -0.0005
## 320 0.4112 nan 0.1000 -0.0001
## 340 0.3955 nan 0.1000 -0.0004
## 360 0.3783 nan 0.1000 -0.0004
## 380 0.3615 nan 0.1000 -0.0002
## 400 0.3468 nan 0.1000 -0.0001
## 420 0.3334 nan 0.1000 -0.0002
## 440 0.3208 nan 0.1000 -0.0003
## 460 0.3071 nan 0.1000 -0.0003
## 480 0.2944 nan 0.1000 -0.0002
## 500 0.2823 nan 0.1000 0.0000
##
## - Fold06: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3329 nan 0.1000 0.0146
## 2 1.3055 nan 0.1000 0.0121
## 3 1.2813 nan 0.1000 0.0104
## 4 1.2567 nan 0.1000 0.0100
## 5 1.2344 nan 0.1000 0.0100
## 6 1.2144 nan 0.1000 0.0073
## 7 1.1970 nan 0.1000 0.0062
## 8 1.1791 nan 0.1000 0.0077
## 9 1.1626 nan 0.1000 0.0068
## 10 1.1468 nan 0.1000 0.0062
## 20 1.0385 nan 0.1000 0.0023
## 40 0.9030 nan 0.1000 0.0002
## 60 0.8085 nan 0.1000 0.0005
## 80 0.7466 nan 0.1000 -0.0003
## 100 0.6904 nan 0.1000 -0.0001
## 120 0.6385 nan 0.1000 -0.0000
## 140 0.5936 nan 0.1000 0.0003
## 160 0.5592 nan 0.1000 -0.0003
## 180 0.5247 nan 0.1000 -0.0008
## 200 0.4940 nan 0.1000 -0.0002
## 220 0.4655 nan 0.1000 -0.0002
## 240 0.4423 nan 0.1000 -0.0003
## 260 0.4201 nan 0.1000 -0.0010
## 280 0.3987 nan 0.1000 -0.0006
## 300 0.3769 nan 0.1000 -0.0001
## 320 0.3593 nan 0.1000 -0.0005
## 340 0.3407 nan 0.1000 -0.0003
## 360 0.3243 nan 0.1000 -0.0002
## 380 0.3102 nan 0.1000 -0.0002
## 400 0.2951 nan 0.1000 -0.0004
## 420 0.2822 nan 0.1000 -0.0000
## 440 0.2698 nan 0.1000 -0.0003
## 460 0.2565 nan 0.1000 -0.0002
## 480 0.2460 nan 0.1000 -0.0004
## 500 0.2357 nan 0.1000 -0.0002
##
## - Fold06: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3289 nan 0.1000 0.0174
## 2 1.2967 nan 0.1000 0.0123
## 3 1.2702 nan 0.1000 0.0107
## 4 1.2438 nan 0.1000 0.0119
## 5 1.2191 nan 0.1000 0.0103
## 6 1.1959 nan 0.1000 0.0097
## 7 1.1803 nan 0.1000 0.0052
## 8 1.1637 nan 0.1000 0.0061
## 9 1.1469 nan 0.1000 0.0063
## 10 1.1320 nan 0.1000 0.0060
## 20 1.0225 nan 0.1000 0.0007
## 40 0.8785 nan 0.1000 0.0007
## 60 0.7833 nan 0.1000 0.0004
## 80 0.7074 nan 0.1000 0.0002
## 100 0.6458 nan 0.1000 -0.0002
## 120 0.5940 nan 0.1000 0.0005
## 140 0.5520 nan 0.1000 0.0000
## 160 0.5177 nan 0.1000 -0.0006
## 180 0.4817 nan 0.1000 -0.0005
## 200 0.4512 nan 0.1000 -0.0003
## 220 0.4231 nan 0.1000 -0.0003
## 240 0.4009 nan 0.1000 -0.0005
## 260 0.3774 nan 0.1000 -0.0004
## 280 0.3564 nan 0.1000 -0.0004
## 300 0.3363 nan 0.1000 -0.0006
## 320 0.3188 nan 0.1000 -0.0005
## 340 0.3023 nan 0.1000 -0.0004
## 360 0.2864 nan 0.1000 -0.0004
## 380 0.2701 nan 0.1000 -0.0002
## 400 0.2562 nan 0.1000 -0.0002
## 420 0.2434 nan 0.1000 -0.0002
## 440 0.2297 nan 0.1000 -0.0001
## 460 0.2176 nan 0.1000 -0.0004
## 480 0.2053 nan 0.1000 -0.0002
## 500 0.1953 nan 0.1000 -0.0001
##
## - Fold06: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold06: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3252 nan 0.1000 0.0181
## 2 1.2920 nan 0.1000 0.0148
## 3 1.2616 nan 0.1000 0.0127
## 4 1.2332 nan 0.1000 0.0109
## 5 1.2097 nan 0.1000 0.0099
## 6 1.1860 nan 0.1000 0.0095
## 7 1.1668 nan 0.1000 0.0069
## 8 1.1523 nan 0.1000 0.0046
## 9 1.1361 nan 0.1000 0.0061
## 10 1.1220 nan 0.1000 0.0038
## 20 1.0015 nan 0.1000 0.0064
## 40 0.8442 nan 0.1000 0.0013
## 60 0.7467 nan 0.1000 0.0000
## 80 0.6727 nan 0.1000 0.0000
## 100 0.6156 nan 0.1000 -0.0005
## 120 0.5673 nan 0.1000 0.0000
## 140 0.5232 nan 0.1000 -0.0002
## 160 0.4855 nan 0.1000 0.0001
## 180 0.4498 nan 0.1000 0.0000
## 200 0.4193 nan 0.1000 -0.0003
## 220 0.3905 nan 0.1000 -0.0003
## 240 0.3665 nan 0.1000 -0.0001
## 260 0.3426 nan 0.1000 -0.0003
## 280 0.3206 nan 0.1000 -0.0005
## 300 0.2996 nan 0.1000 -0.0003
## 320 0.2806 nan 0.1000 -0.0002
## 340 0.2634 nan 0.1000 -0.0003
## 360 0.2483 nan 0.1000 -0.0002
## 380 0.2337 nan 0.1000 -0.0005
## 400 0.2199 nan 0.1000 -0.0003
## 420 0.2076 nan 0.1000 -0.0001
## 440 0.1953 nan 0.1000 -0.0002
## 460 0.1845 nan 0.1000 -0.0002
## 480 0.1746 nan 0.1000 -0.0005
## 500 0.1651 nan 0.1000 -0.0002
##
## - Fold06: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3532 nan 0.1000 0.0064
## 2 1.3429 nan 0.1000 0.0056
## 3 1.3330 nan 0.1000 0.0043
## 4 1.3231 nan 0.1000 0.0054
## 5 1.3154 nan 0.1000 0.0038
## 6 1.3072 nan 0.1000 0.0039
## 7 1.3011 nan 0.1000 0.0030
## 8 1.2939 nan 0.1000 0.0029
## 9 1.2877 nan 0.1000 0.0024
## 10 1.2826 nan 0.1000 0.0024
## 20 1.2424 nan 0.1000 0.0015
## 40 1.1968 nan 0.1000 0.0006
## 60 1.1698 nan 0.1000 -0.0001
## 80 1.1500 nan 0.1000 0.0001
## 100 1.1375 nan 0.1000 -0.0003
## 120 1.1247 nan 0.1000 -0.0003
## 140 1.1135 nan 0.1000 -0.0002
## 160 1.1024 nan 0.1000 0.0000
## 180 1.0930 nan 0.1000 -0.0002
## 200 1.0858 nan 0.1000 -0.0001
## 220 1.0784 nan 0.1000 0.0000
## 240 1.0714 nan 0.1000 -0.0000
## 260 1.0644 nan 0.1000 -0.0000
## 280 1.0575 nan 0.1000 -0.0002
## 300 1.0502 nan 0.1000 -0.0001
## 320 1.0439 nan 0.1000 0.0000
## 340 1.0385 nan 0.1000 -0.0004
## 360 1.0338 nan 0.1000 -0.0000
## 380 1.0276 nan 0.1000 -0.0001
## 400 1.0218 nan 0.1000 -0.0003
## 420 1.0172 nan 0.1000 -0.0001
## 440 1.0119 nan 0.1000 -0.0003
## 460 1.0073 nan 0.1000 -0.0006
## 480 1.0027 nan 0.1000 -0.0003
## 500 0.9980 nan 0.1000 -0.0006
##
## - Fold07: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3451 nan 0.1000 0.0102
## 2 1.3256 nan 0.1000 0.0092
## 3 1.3121 nan 0.1000 0.0058
## 4 1.2977 nan 0.1000 0.0072
## 5 1.2850 nan 0.1000 0.0058
## 6 1.2762 nan 0.1000 0.0036
## 7 1.2660 nan 0.1000 0.0049
## 8 1.2566 nan 0.1000 0.0037
## 9 1.2474 nan 0.1000 0.0038
## 10 1.2369 nan 0.1000 0.0051
## 20 1.1715 nan 0.1000 0.0025
## 40 1.1003 nan 0.1000 0.0002
## 60 1.0538 nan 0.1000 0.0005
## 80 1.0118 nan 0.1000 -0.0000
## 100 0.9863 nan 0.1000 -0.0001
## 120 0.9572 nan 0.1000 -0.0003
## 140 0.9328 nan 0.1000 -0.0002
## 160 0.9103 nan 0.1000 0.0004
## 180 0.8872 nan 0.1000 0.0002
## 200 0.8691 nan 0.1000 -0.0002
## 220 0.8526 nan 0.1000 -0.0003
## 240 0.8382 nan 0.1000 0.0001
## 260 0.8186 nan 0.1000 -0.0005
## 280 0.8042 nan 0.1000 -0.0000
## 300 0.7869 nan 0.1000 -0.0001
## 320 0.7765 nan 0.1000 -0.0009
## 340 0.7646 nan 0.1000 -0.0007
## 360 0.7541 nan 0.1000 0.0001
## 380 0.7436 nan 0.1000 -0.0001
## 400 0.7333 nan 0.1000 -0.0001
## 420 0.7237 nan 0.1000 -0.0004
## 440 0.7144 nan 0.1000 -0.0002
## 460 0.7048 nan 0.1000 -0.0003
## 480 0.6974 nan 0.1000 -0.0002
## 500 0.6887 nan 0.1000 -0.0003
##
## - Fold07: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3416 nan 0.1000 0.0116
## 2 1.3213 nan 0.1000 0.0085
## 3 1.3023 nan 0.1000 0.0088
## 4 1.2884 nan 0.1000 0.0063
## 5 1.2730 nan 0.1000 0.0072
## 6 1.2595 nan 0.1000 0.0063
## 7 1.2454 nan 0.1000 0.0062
## 8 1.2368 nan 0.1000 0.0035
## 9 1.2254 nan 0.1000 0.0050
## 10 1.2157 nan 0.1000 0.0041
## 20 1.1499 nan 0.1000 0.0013
## 40 1.0579 nan 0.1000 0.0007
## 60 0.9999 nan 0.1000 0.0013
## 80 0.9595 nan 0.1000 0.0006
## 100 0.9228 nan 0.1000 0.0004
## 120 0.8870 nan 0.1000 -0.0000
## 140 0.8559 nan 0.1000 -0.0002
## 160 0.8253 nan 0.1000 -0.0001
## 180 0.7981 nan 0.1000 -0.0002
## 200 0.7734 nan 0.1000 0.0007
## 220 0.7551 nan 0.1000 -0.0006
## 240 0.7353 nan 0.1000 -0.0002
## 260 0.7174 nan 0.1000 -0.0001
## 280 0.7029 nan 0.1000 -0.0002
## 300 0.6832 nan 0.1000 -0.0002
## 320 0.6675 nan 0.1000 -0.0001
## 340 0.6558 nan 0.1000 -0.0003
## 360 0.6431 nan 0.1000 -0.0001
## 380 0.6289 nan 0.1000 0.0002
## 400 0.6163 nan 0.1000 -0.0004
## 420 0.6052 nan 0.1000 -0.0003
## 440 0.5938 nan 0.1000 -0.0002
## 460 0.5808 nan 0.1000 -0.0002
## 480 0.5713 nan 0.1000 -0.0006
## 500 0.5606 nan 0.1000 -0.0004
##
## - Fold07: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3386 nan 0.1000 0.0116
## 2 1.3117 nan 0.1000 0.0125
## 3 1.2932 nan 0.1000 0.0087
## 4 1.2718 nan 0.1000 0.0103
## 5 1.2543 nan 0.1000 0.0075
## 6 1.2381 nan 0.1000 0.0075
## 7 1.2237 nan 0.1000 0.0064
## 8 1.2096 nan 0.1000 0.0058
## 9 1.1984 nan 0.1000 0.0042
## 10 1.1864 nan 0.1000 0.0053
## 20 1.1050 nan 0.1000 0.0029
## 40 1.0024 nan 0.1000 0.0022
## 60 0.9350 nan 0.1000 0.0001
## 80 0.8906 nan 0.1000 0.0004
## 100 0.8468 nan 0.1000 0.0007
## 120 0.8079 nan 0.1000 0.0008
## 140 0.7702 nan 0.1000 0.0002
## 160 0.7430 nan 0.1000 -0.0002
## 180 0.7174 nan 0.1000 -0.0004
## 200 0.6892 nan 0.1000 0.0005
## 220 0.6696 nan 0.1000 -0.0005
## 240 0.6466 nan 0.1000 -0.0001
## 260 0.6251 nan 0.1000 -0.0003
## 280 0.6050 nan 0.1000 -0.0001
## 300 0.5879 nan 0.1000 0.0002
## 320 0.5696 nan 0.1000 -0.0004
## 340 0.5534 nan 0.1000 -0.0006
## 360 0.5384 nan 0.1000 -0.0003
## 380 0.5234 nan 0.1000 -0.0001
## 400 0.5107 nan 0.1000 -0.0002
## 420 0.4967 nan 0.1000 -0.0004
## 440 0.4846 nan 0.1000 -0.0002
## 460 0.4713 nan 0.1000 -0.0002
## 480 0.4608 nan 0.1000 -0.0003
## 500 0.4496 nan 0.1000 -0.0003
##
## - Fold07: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3351 nan 0.1000 0.0136
## 2 1.3098 nan 0.1000 0.0114
## 3 1.2858 nan 0.1000 0.0113
## 4 1.2639 nan 0.1000 0.0100
## 5 1.2456 nan 0.1000 0.0080
## 6 1.2284 nan 0.1000 0.0078
## 7 1.2127 nan 0.1000 0.0070
## 8 1.1966 nan 0.1000 0.0071
## 9 1.1839 nan 0.1000 0.0050
## 10 1.1687 nan 0.1000 0.0062
## 20 1.0827 nan 0.1000 0.0040
## 40 0.9773 nan 0.1000 0.0020
## 60 0.9010 nan 0.1000 0.0001
## 80 0.8452 nan 0.1000 0.0014
## 100 0.7972 nan 0.1000 0.0001
## 120 0.7591 nan 0.1000 0.0003
## 140 0.7227 nan 0.1000 0.0004
## 160 0.6931 nan 0.1000 -0.0002
## 180 0.6606 nan 0.1000 0.0003
## 200 0.6327 nan 0.1000 -0.0000
## 220 0.6062 nan 0.1000 -0.0003
## 240 0.5854 nan 0.1000 -0.0006
## 260 0.5615 nan 0.1000 0.0000
## 280 0.5439 nan 0.1000 -0.0006
## 300 0.5250 nan 0.1000 -0.0001
## 320 0.5083 nan 0.1000 -0.0004
## 340 0.4897 nan 0.1000 -0.0000
## 360 0.4727 nan 0.1000 -0.0003
## 380 0.4568 nan 0.1000 -0.0005
## 400 0.4433 nan 0.1000 -0.0004
## 420 0.4307 nan 0.1000 -0.0006
## 440 0.4177 nan 0.1000 -0.0004
## 460 0.4072 nan 0.1000 -0.0003
## 480 0.3951 nan 0.1000 -0.0004
## 500 0.3846 nan 0.1000 -0.0002
##
## - Fold07: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3329 nan 0.1000 0.0158
## 2 1.3032 nan 0.1000 0.0148
## 3 1.2783 nan 0.1000 0.0106
## 4 1.2572 nan 0.1000 0.0093
## 5 1.2351 nan 0.1000 0.0093
## 6 1.2186 nan 0.1000 0.0074
## 7 1.2014 nan 0.1000 0.0078
## 8 1.1864 nan 0.1000 0.0054
## 9 1.1734 nan 0.1000 0.0045
## 10 1.1621 nan 0.1000 0.0040
## 20 1.0698 nan 0.1000 0.0014
## 40 0.9459 nan 0.1000 0.0021
## 60 0.8722 nan 0.1000 0.0005
## 80 0.8030 nan 0.1000 -0.0005
## 100 0.7459 nan 0.1000 0.0008
## 120 0.7038 nan 0.1000 -0.0005
## 140 0.6657 nan 0.1000 -0.0006
## 160 0.6318 nan 0.1000 -0.0003
## 180 0.6003 nan 0.1000 -0.0004
## 200 0.5755 nan 0.1000 -0.0001
## 220 0.5500 nan 0.1000 -0.0002
## 240 0.5282 nan 0.1000 -0.0002
## 260 0.5072 nan 0.1000 -0.0002
## 280 0.4851 nan 0.1000 -0.0002
## 300 0.4631 nan 0.1000 -0.0003
## 320 0.4444 nan 0.1000 -0.0001
## 340 0.4263 nan 0.1000 -0.0003
## 360 0.4107 nan 0.1000 -0.0003
## 380 0.3969 nan 0.1000 -0.0006
## 400 0.3817 nan 0.1000 -0.0004
## 420 0.3680 nan 0.1000 -0.0003
## 440 0.3541 nan 0.1000 0.0002
## 460 0.3420 nan 0.1000 -0.0001
## 480 0.3296 nan 0.1000 -0.0002
## 500 0.3175 nan 0.1000 -0.0005
##
## - Fold07: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3322 nan 0.1000 0.0149
## 2 1.3050 nan 0.1000 0.0125
## 3 1.2780 nan 0.1000 0.0116
## 4 1.2525 nan 0.1000 0.0130
## 5 1.2303 nan 0.1000 0.0093
## 6 1.2110 nan 0.1000 0.0079
## 7 1.1954 nan 0.1000 0.0056
## 8 1.1784 nan 0.1000 0.0069
## 9 1.1642 nan 0.1000 0.0046
## 10 1.1503 nan 0.1000 0.0065
## 20 1.0450 nan 0.1000 0.0038
## 40 0.9286 nan 0.1000 0.0001
## 60 0.8417 nan 0.1000 0.0017
## 80 0.7785 nan 0.1000 -0.0002
## 100 0.7178 nan 0.1000 -0.0005
## 120 0.6726 nan 0.1000 -0.0001
## 140 0.6298 nan 0.1000 -0.0002
## 160 0.5908 nan 0.1000 0.0006
## 180 0.5589 nan 0.1000 0.0002
## 200 0.5282 nan 0.1000 -0.0004
## 220 0.4986 nan 0.1000 -0.0002
## 240 0.4731 nan 0.1000 -0.0002
## 260 0.4490 nan 0.1000 -0.0002
## 280 0.4265 nan 0.1000 -0.0002
## 300 0.4063 nan 0.1000 0.0001
## 320 0.3863 nan 0.1000 -0.0003
## 340 0.3678 nan 0.1000 -0.0000
## 360 0.3514 nan 0.1000 -0.0004
## 380 0.3352 nan 0.1000 -0.0004
## 400 0.3203 nan 0.1000 -0.0004
## 420 0.3066 nan 0.1000 -0.0001
## 440 0.2931 nan 0.1000 -0.0005
## 460 0.2801 nan 0.1000 -0.0004
## 480 0.2686 nan 0.1000 -0.0001
## 500 0.2575 nan 0.1000 -0.0003
##
## - Fold07: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3257 nan 0.1000 0.0198
## 2 1.2942 nan 0.1000 0.0150
## 3 1.2639 nan 0.1000 0.0132
## 4 1.2398 nan 0.1000 0.0099
## 5 1.2195 nan 0.1000 0.0084
## 6 1.1981 nan 0.1000 0.0086
## 7 1.1800 nan 0.1000 0.0070
## 8 1.1648 nan 0.1000 0.0062
## 9 1.1487 nan 0.1000 0.0051
## 10 1.1359 nan 0.1000 0.0041
## 20 1.0197 nan 0.1000 0.0037
## 40 0.8879 nan 0.1000 -0.0004
## 60 0.7979 nan 0.1000 0.0009
## 80 0.7324 nan 0.1000 -0.0002
## 100 0.6803 nan 0.1000 0.0002
## 120 0.6254 nan 0.1000 0.0005
## 140 0.5828 nan 0.1000 -0.0002
## 160 0.5432 nan 0.1000 0.0006
## 180 0.5129 nan 0.1000 -0.0000
## 200 0.4837 nan 0.1000 -0.0004
## 220 0.4568 nan 0.1000 -0.0000
## 240 0.4313 nan 0.1000 -0.0007
## 260 0.4081 nan 0.1000 -0.0001
## 280 0.3870 nan 0.1000 -0.0002
## 300 0.3654 nan 0.1000 -0.0005
## 320 0.3462 nan 0.1000 -0.0005
## 340 0.3279 nan 0.1000 -0.0002
## 360 0.3118 nan 0.1000 -0.0005
## 380 0.2963 nan 0.1000 -0.0006
## 400 0.2820 nan 0.1000 -0.0003
## 420 0.2680 nan 0.1000 -0.0003
## 440 0.2538 nan 0.1000 -0.0000
## 460 0.2424 nan 0.1000 -0.0002
## 480 0.2303 nan 0.1000 -0.0002
## 500 0.2201 nan 0.1000 -0.0003
##
## - Fold07: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3256 nan 0.1000 0.0173
## 2 1.2978 nan 0.1000 0.0109
## 3 1.2689 nan 0.1000 0.0135
## 4 1.2442 nan 0.1000 0.0107
## 5 1.2189 nan 0.1000 0.0110
## 6 1.1991 nan 0.1000 0.0068
## 7 1.1814 nan 0.1000 0.0072
## 8 1.1640 nan 0.1000 0.0076
## 9 1.1508 nan 0.1000 0.0037
## 10 1.1375 nan 0.1000 0.0040
## 20 1.0272 nan 0.1000 0.0015
## 40 0.8784 nan 0.1000 0.0015
## 60 0.7767 nan 0.1000 0.0022
## 80 0.7023 nan 0.1000 -0.0015
## 100 0.6424 nan 0.1000 -0.0001
## 120 0.5906 nan 0.1000 0.0006
## 140 0.5475 nan 0.1000 -0.0002
## 160 0.5108 nan 0.1000 -0.0005
## 180 0.4748 nan 0.1000 -0.0004
## 200 0.4456 nan 0.1000 -0.0002
## 220 0.4186 nan 0.1000 0.0001
## 240 0.3929 nan 0.1000 -0.0005
## 260 0.3702 nan 0.1000 -0.0004
## 280 0.3481 nan 0.1000 -0.0004
## 300 0.3280 nan 0.1000 0.0001
## 320 0.3114 nan 0.1000 -0.0006
## 340 0.2954 nan 0.1000 -0.0002
## 360 0.2792 nan 0.1000 -0.0003
## 380 0.2637 nan 0.1000 -0.0002
## 400 0.2499 nan 0.1000 -0.0004
## 420 0.2357 nan 0.1000 -0.0002
## 440 0.2242 nan 0.1000 -0.0003
## 460 0.2127 nan 0.1000 -0.0004
## 480 0.2018 nan 0.1000 -0.0001
## 500 0.1914 nan 0.1000 -0.0003
##
## - Fold07: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold07: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3289 nan 0.1000 0.0157
## 2 1.2984 nan 0.1000 0.0132
## 3 1.2697 nan 0.1000 0.0111
## 4 1.2456 nan 0.1000 0.0100
## 5 1.2242 nan 0.1000 0.0083
## 6 1.2055 nan 0.1000 0.0069
## 7 1.1829 nan 0.1000 0.0097
## 8 1.1650 nan 0.1000 0.0071
## 9 1.1448 nan 0.1000 0.0084
## 10 1.1317 nan 0.1000 0.0030
## 20 1.0043 nan 0.1000 0.0036
## 40 0.8523 nan 0.1000 0.0004
## 60 0.7524 nan 0.1000 0.0015
## 80 0.6769 nan 0.1000 0.0003
## 100 0.6158 nan 0.1000 -0.0003
## 120 0.5678 nan 0.1000 -0.0001
## 140 0.5191 nan 0.1000 0.0001
## 160 0.4790 nan 0.1000 -0.0000
## 180 0.4429 nan 0.1000 -0.0004
## 200 0.4118 nan 0.1000 -0.0003
## 220 0.3859 nan 0.1000 -0.0003
## 240 0.3584 nan 0.1000 -0.0004
## 260 0.3348 nan 0.1000 -0.0005
## 280 0.3131 nan 0.1000 0.0002
## 300 0.2929 nan 0.1000 -0.0001
## 320 0.2742 nan 0.1000 -0.0002
## 340 0.2573 nan 0.1000 -0.0002
## 360 0.2410 nan 0.1000 -0.0004
## 380 0.2272 nan 0.1000 -0.0003
## 400 0.2147 nan 0.1000 -0.0003
## 420 0.2014 nan 0.1000 -0.0001
## 440 0.1887 nan 0.1000 -0.0002
## 460 0.1775 nan 0.1000 -0.0002
## 480 0.1664 nan 0.1000 -0.0001
## 500 0.1584 nan 0.1000 -0.0002
##
## - Fold07: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3494 nan 0.1000 0.0076
## 2 1.3353 nan 0.1000 0.0066
## 3 1.3246 nan 0.1000 0.0053
## 4 1.3136 nan 0.1000 0.0043
## 5 1.3034 nan 0.1000 0.0043
## 6 1.2949 nan 0.1000 0.0030
## 7 1.2871 nan 0.1000 0.0034
## 8 1.2800 nan 0.1000 0.0031
## 9 1.2748 nan 0.1000 0.0022
## 10 1.2686 nan 0.1000 0.0028
## 20 1.2266 nan 0.1000 0.0014
## 40 1.1849 nan 0.1000 0.0003
## 60 1.1583 nan 0.1000 0.0002
## 80 1.1387 nan 0.1000 -0.0003
## 100 1.1226 nan 0.1000 -0.0002
## 120 1.1089 nan 0.1000 0.0001
## 140 1.0971 nan 0.1000 0.0000
## 160 1.0870 nan 0.1000 -0.0002
## 180 1.0773 nan 0.1000 0.0000
## 200 1.0691 nan 0.1000 -0.0002
## 220 1.0594 nan 0.1000 -0.0001
## 240 1.0513 nan 0.1000 -0.0002
## 260 1.0446 nan 0.1000 -0.0001
## 280 1.0365 nan 0.1000 -0.0003
## 300 1.0296 nan 0.1000 -0.0001
## 320 1.0236 nan 0.1000 -0.0001
## 340 1.0168 nan 0.1000 0.0001
## 360 1.0105 nan 0.1000 -0.0002
## 380 1.0053 nan 0.1000 -0.0004
## 400 1.0000 nan 0.1000 -0.0000
## 420 0.9938 nan 0.1000 -0.0004
## 440 0.9888 nan 0.1000 -0.0002
## 460 0.9846 nan 0.1000 -0.0003
## 480 0.9792 nan 0.1000 -0.0003
## 500 0.9738 nan 0.1000 -0.0001
##
## - Fold08: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3454 nan 0.1000 0.0102
## 2 1.3294 nan 0.1000 0.0079
## 3 1.3122 nan 0.1000 0.0083
## 4 1.2990 nan 0.1000 0.0064
## 5 1.2855 nan 0.1000 0.0064
## 6 1.2743 nan 0.1000 0.0054
## 7 1.2634 nan 0.1000 0.0042
## 8 1.2556 nan 0.1000 0.0037
## 9 1.2472 nan 0.1000 0.0032
## 10 1.2392 nan 0.1000 0.0027
## 20 1.1768 nan 0.1000 0.0017
## 40 1.1021 nan 0.1000 0.0008
## 60 1.0612 nan 0.1000 0.0002
## 80 1.0234 nan 0.1000 0.0003
## 100 0.9909 nan 0.1000 -0.0002
## 120 0.9648 nan 0.1000 -0.0001
## 140 0.9373 nan 0.1000 -0.0001
## 160 0.9125 nan 0.1000 -0.0008
## 180 0.8923 nan 0.1000 0.0007
## 200 0.8743 nan 0.1000 -0.0004
## 220 0.8574 nan 0.1000 -0.0004
## 240 0.8425 nan 0.1000 -0.0004
## 260 0.8314 nan 0.1000 0.0000
## 280 0.8168 nan 0.1000 0.0000
## 300 0.8037 nan 0.1000 -0.0001
## 320 0.7923 nan 0.1000 -0.0003
## 340 0.7823 nan 0.1000 -0.0003
## 360 0.7719 nan 0.1000 -0.0003
## 380 0.7629 nan 0.1000 -0.0004
## 400 0.7530 nan 0.1000 -0.0003
## 420 0.7426 nan 0.1000 0.0001
## 440 0.7326 nan 0.1000 -0.0001
## 460 0.7269 nan 0.1000 -0.0002
## 480 0.7187 nan 0.1000 -0.0002
## 500 0.7086 nan 0.1000 -0.0001
##
## - Fold08: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3383 nan 0.1000 0.0128
## 2 1.3163 nan 0.1000 0.0104
## 3 1.3013 nan 0.1000 0.0066
## 4 1.2830 nan 0.1000 0.0085
## 5 1.2666 nan 0.1000 0.0064
## 6 1.2524 nan 0.1000 0.0060
## 7 1.2391 nan 0.1000 0.0054
## 8 1.2291 nan 0.1000 0.0040
## 9 1.2179 nan 0.1000 0.0055
## 10 1.2087 nan 0.1000 0.0026
## 20 1.1371 nan 0.1000 0.0024
## 40 1.0617 nan 0.1000 0.0006
## 60 0.9948 nan 0.1000 0.0011
## 80 0.9491 nan 0.1000 0.0009
## 100 0.9183 nan 0.1000 -0.0002
## 120 0.8841 nan 0.1000 0.0010
## 140 0.8568 nan 0.1000 0.0005
## 160 0.8313 nan 0.1000 0.0002
## 180 0.8055 nan 0.1000 -0.0004
## 200 0.7817 nan 0.1000 -0.0004
## 220 0.7620 nan 0.1000 -0.0005
## 240 0.7432 nan 0.1000 -0.0006
## 260 0.7247 nan 0.1000 -0.0001
## 280 0.7093 nan 0.1000 -0.0003
## 300 0.6942 nan 0.1000 -0.0003
## 320 0.6800 nan 0.1000 -0.0001
## 340 0.6649 nan 0.1000 0.0002
## 360 0.6510 nan 0.1000 -0.0003
## 380 0.6398 nan 0.1000 -0.0002
## 400 0.6286 nan 0.1000 -0.0000
## 420 0.6166 nan 0.1000 -0.0002
## 440 0.6060 nan 0.1000 -0.0004
## 460 0.5926 nan 0.1000 -0.0002
## 480 0.5832 nan 0.1000 -0.0002
## 500 0.5742 nan 0.1000 -0.0004
##
## - Fold08: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3358 nan 0.1000 0.0141
## 2 1.3115 nan 0.1000 0.0118
## 3 1.2904 nan 0.1000 0.0107
## 4 1.2714 nan 0.1000 0.0083
## 5 1.2558 nan 0.1000 0.0076
## 6 1.2415 nan 0.1000 0.0062
## 7 1.2293 nan 0.1000 0.0050
## 8 1.2174 nan 0.1000 0.0052
## 9 1.2072 nan 0.1000 0.0041
## 10 1.1939 nan 0.1000 0.0059
## 20 1.1103 nan 0.1000 0.0008
## 40 1.0015 nan 0.1000 0.0019
## 60 0.9395 nan 0.1000 0.0014
## 80 0.8887 nan 0.1000 -0.0002
## 100 0.8444 nan 0.1000 0.0006
## 120 0.8115 nan 0.1000 0.0006
## 140 0.7798 nan 0.1000 0.0007
## 160 0.7536 nan 0.1000 0.0003
## 180 0.7299 nan 0.1000 -0.0000
## 200 0.7063 nan 0.1000 -0.0000
## 220 0.6825 nan 0.1000 -0.0001
## 240 0.6631 nan 0.1000 -0.0002
## 260 0.6400 nan 0.1000 0.0003
## 280 0.6221 nan 0.1000 -0.0003
## 300 0.6073 nan 0.1000 -0.0005
## 320 0.5891 nan 0.1000 -0.0005
## 340 0.5748 nan 0.1000 -0.0007
## 360 0.5608 nan 0.1000 -0.0005
## 380 0.5476 nan 0.1000 -0.0005
## 400 0.5333 nan 0.1000 -0.0005
## 420 0.5207 nan 0.1000 -0.0001
## 440 0.5087 nan 0.1000 -0.0005
## 460 0.4963 nan 0.1000 -0.0007
## 480 0.4844 nan 0.1000 -0.0002
## 500 0.4733 nan 0.1000 -0.0010
##
## - Fold08: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3312 nan 0.1000 0.0152
## 2 1.3015 nan 0.1000 0.0127
## 3 1.2795 nan 0.1000 0.0095
## 4 1.2599 nan 0.1000 0.0085
## 5 1.2421 nan 0.1000 0.0074
## 6 1.2267 nan 0.1000 0.0057
## 7 1.2089 nan 0.1000 0.0083
## 8 1.1936 nan 0.1000 0.0068
## 9 1.1802 nan 0.1000 0.0062
## 10 1.1687 nan 0.1000 0.0048
## 20 1.0854 nan 0.1000 0.0025
## 40 0.9795 nan 0.1000 0.0002
## 60 0.8986 nan 0.1000 0.0004
## 80 0.8460 nan 0.1000 0.0004
## 100 0.7988 nan 0.1000 0.0012
## 120 0.7602 nan 0.1000 0.0004
## 140 0.7216 nan 0.1000 -0.0004
## 160 0.6896 nan 0.1000 -0.0002
## 180 0.6576 nan 0.1000 0.0006
## 200 0.6313 nan 0.1000 -0.0003
## 220 0.6090 nan 0.1000 -0.0005
## 240 0.5850 nan 0.1000 -0.0002
## 260 0.5644 nan 0.1000 -0.0003
## 280 0.5445 nan 0.1000 0.0002
## 300 0.5261 nan 0.1000 -0.0002
## 320 0.5094 nan 0.1000 -0.0001
## 340 0.4902 nan 0.1000 -0.0004
## 360 0.4754 nan 0.1000 -0.0003
## 380 0.4590 nan 0.1000 -0.0001
## 400 0.4443 nan 0.1000 -0.0001
## 420 0.4307 nan 0.1000 -0.0000
## 440 0.4170 nan 0.1000 -0.0004
## 460 0.4053 nan 0.1000 -0.0003
## 480 0.3924 nan 0.1000 0.0000
## 500 0.3813 nan 0.1000 -0.0002
##
## - Fold08: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3322 nan 0.1000 0.0160
## 2 1.3068 nan 0.1000 0.0103
## 3 1.2836 nan 0.1000 0.0097
## 4 1.2615 nan 0.1000 0.0093
## 5 1.2442 nan 0.1000 0.0074
## 6 1.2284 nan 0.1000 0.0067
## 7 1.2134 nan 0.1000 0.0068
## 8 1.1971 nan 0.1000 0.0068
## 9 1.1815 nan 0.1000 0.0071
## 10 1.1688 nan 0.1000 0.0050
## 20 1.0801 nan 0.1000 0.0023
## 40 0.9479 nan 0.1000 0.0015
## 60 0.8657 nan 0.1000 0.0001
## 80 0.8073 nan 0.1000 0.0005
## 100 0.7556 nan 0.1000 -0.0000
## 120 0.7079 nan 0.1000 -0.0000
## 140 0.6670 nan 0.1000 -0.0002
## 160 0.6344 nan 0.1000 -0.0004
## 180 0.6051 nan 0.1000 -0.0002
## 200 0.5769 nan 0.1000 -0.0007
## 220 0.5544 nan 0.1000 -0.0004
## 240 0.5315 nan 0.1000 -0.0005
## 260 0.5088 nan 0.1000 -0.0003
## 280 0.4884 nan 0.1000 0.0001
## 300 0.4689 nan 0.1000 -0.0001
## 320 0.4527 nan 0.1000 0.0000
## 340 0.4366 nan 0.1000 -0.0003
## 360 0.4191 nan 0.1000 -0.0003
## 380 0.4046 nan 0.1000 -0.0004
## 400 0.3905 nan 0.1000 -0.0002
## 420 0.3759 nan 0.1000 -0.0002
## 440 0.3622 nan 0.1000 -0.0004
## 460 0.3488 nan 0.1000 -0.0002
## 480 0.3363 nan 0.1000 -0.0003
## 500 0.3255 nan 0.1000 -0.0002
##
## - Fold08: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3294 nan 0.1000 0.0172
## 2 1.2979 nan 0.1000 0.0140
## 3 1.2693 nan 0.1000 0.0127
## 4 1.2496 nan 0.1000 0.0090
## 5 1.2280 nan 0.1000 0.0084
## 6 1.2082 nan 0.1000 0.0075
## 7 1.1899 nan 0.1000 0.0093
## 8 1.1724 nan 0.1000 0.0075
## 9 1.1595 nan 0.1000 0.0051
## 10 1.1467 nan 0.1000 0.0051
## 20 1.0396 nan 0.1000 0.0026
## 40 0.9012 nan 0.1000 0.0018
## 60 0.8181 nan 0.1000 -0.0005
## 80 0.7545 nan 0.1000 -0.0003
## 100 0.7025 nan 0.1000 -0.0000
## 120 0.6540 nan 0.1000 -0.0001
## 140 0.6165 nan 0.1000 -0.0001
## 160 0.5827 nan 0.1000 0.0001
## 180 0.5520 nan 0.1000 -0.0003
## 200 0.5213 nan 0.1000 -0.0002
## 220 0.4928 nan 0.1000 -0.0007
## 240 0.4666 nan 0.1000 -0.0003
## 260 0.4469 nan 0.1000 -0.0005
## 280 0.4257 nan 0.1000 -0.0005
## 300 0.4045 nan 0.1000 -0.0004
## 320 0.3871 nan 0.1000 -0.0004
## 340 0.3682 nan 0.1000 -0.0001
## 360 0.3514 nan 0.1000 -0.0005
## 380 0.3355 nan 0.1000 -0.0002
## 400 0.3211 nan 0.1000 -0.0003
## 420 0.3071 nan 0.1000 -0.0002
## 440 0.2944 nan 0.1000 -0.0001
## 460 0.2815 nan 0.1000 -0.0003
## 480 0.2687 nan 0.1000 -0.0002
## 500 0.2576 nan 0.1000 0.0001
##
## - Fold08: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3295 nan 0.1000 0.0169
## 2 1.2969 nan 0.1000 0.0142
## 3 1.2679 nan 0.1000 0.0139
## 4 1.2438 nan 0.1000 0.0097
## 5 1.2226 nan 0.1000 0.0090
## 6 1.2063 nan 0.1000 0.0070
## 7 1.1878 nan 0.1000 0.0075
## 8 1.1714 nan 0.1000 0.0057
## 9 1.1567 nan 0.1000 0.0061
## 10 1.1417 nan 0.1000 0.0059
## 20 1.0353 nan 0.1000 0.0016
## 40 0.8992 nan 0.1000 0.0018
## 60 0.8123 nan 0.1000 0.0005
## 80 0.7392 nan 0.1000 -0.0005
## 100 0.6842 nan 0.1000 -0.0000
## 120 0.6346 nan 0.1000 0.0008
## 140 0.5954 nan 0.1000 -0.0002
## 160 0.5568 nan 0.1000 -0.0004
## 180 0.5265 nan 0.1000 -0.0001
## 200 0.4963 nan 0.1000 -0.0002
## 220 0.4697 nan 0.1000 0.0002
## 240 0.4421 nan 0.1000 -0.0003
## 260 0.4173 nan 0.1000 -0.0001
## 280 0.3960 nan 0.1000 -0.0001
## 300 0.3736 nan 0.1000 0.0001
## 320 0.3549 nan 0.1000 -0.0003
## 340 0.3376 nan 0.1000 -0.0002
## 360 0.3202 nan 0.1000 -0.0000
## 380 0.3047 nan 0.1000 -0.0001
## 400 0.2914 nan 0.1000 -0.0003
## 420 0.2776 nan 0.1000 -0.0003
## 440 0.2635 nan 0.1000 -0.0003
## 460 0.2520 nan 0.1000 -0.0004
## 480 0.2415 nan 0.1000 -0.0001
## 500 0.2292 nan 0.1000 -0.0002
##
## - Fold08: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3264 nan 0.1000 0.0194
## 2 1.2956 nan 0.1000 0.0132
## 3 1.2675 nan 0.1000 0.0134
## 4 1.2389 nan 0.1000 0.0128
## 5 1.2183 nan 0.1000 0.0077
## 6 1.1988 nan 0.1000 0.0083
## 7 1.1786 nan 0.1000 0.0088
## 8 1.1601 nan 0.1000 0.0079
## 9 1.1433 nan 0.1000 0.0074
## 10 1.1291 nan 0.1000 0.0056
## 20 1.0156 nan 0.1000 0.0031
## 40 0.8824 nan 0.1000 0.0006
## 60 0.7844 nan 0.1000 0.0012
## 80 0.7079 nan 0.1000 -0.0001
## 100 0.6535 nan 0.1000 0.0000
## 120 0.6019 nan 0.1000 0.0002
## 140 0.5544 nan 0.1000 -0.0004
## 160 0.5188 nan 0.1000 -0.0006
## 180 0.4879 nan 0.1000 -0.0001
## 200 0.4548 nan 0.1000 0.0002
## 220 0.4279 nan 0.1000 -0.0001
## 240 0.4030 nan 0.1000 -0.0001
## 260 0.3791 nan 0.1000 -0.0007
## 280 0.3571 nan 0.1000 -0.0004
## 300 0.3361 nan 0.1000 -0.0001
## 320 0.3171 nan 0.1000 -0.0002
## 340 0.3003 nan 0.1000 -0.0005
## 360 0.2858 nan 0.1000 -0.0006
## 380 0.2713 nan 0.1000 -0.0002
## 400 0.2564 nan 0.1000 -0.0005
## 420 0.2425 nan 0.1000 -0.0002
## 440 0.2298 nan 0.1000 -0.0002
## 460 0.2165 nan 0.1000 -0.0003
## 480 0.2055 nan 0.1000 -0.0003
## 500 0.1947 nan 0.1000 -0.0000
##
## - Fold08: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold08: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3286 nan 0.1000 0.0171
## 2 1.2945 nan 0.1000 0.0155
## 3 1.2636 nan 0.1000 0.0143
## 4 1.2383 nan 0.1000 0.0108
## 5 1.2141 nan 0.1000 0.0096
## 6 1.1906 nan 0.1000 0.0090
## 7 1.1706 nan 0.1000 0.0066
## 8 1.1494 nan 0.1000 0.0081
## 9 1.1282 nan 0.1000 0.0088
## 10 1.1102 nan 0.1000 0.0063
## 20 0.9967 nan 0.1000 0.0018
## 40 0.8419 nan 0.1000 0.0030
## 60 0.7380 nan 0.1000 -0.0002
## 80 0.6617 nan 0.1000 0.0004
## 100 0.6024 nan 0.1000 0.0000
## 120 0.5511 nan 0.1000 -0.0003
## 140 0.5088 nan 0.1000 0.0001
## 160 0.4682 nan 0.1000 -0.0005
## 180 0.4323 nan 0.1000 -0.0007
## 200 0.4018 nan 0.1000 -0.0004
## 220 0.3710 nan 0.1000 -0.0004
## 240 0.3463 nan 0.1000 -0.0005
## 260 0.3238 nan 0.1000 -0.0002
## 280 0.3014 nan 0.1000 -0.0002
## 300 0.2823 nan 0.1000 -0.0003
## 320 0.2653 nan 0.1000 -0.0003
## 340 0.2492 nan 0.1000 -0.0004
## 360 0.2348 nan 0.1000 -0.0001
## 380 0.2209 nan 0.1000 -0.0001
## 400 0.2077 nan 0.1000 -0.0003
## 420 0.1956 nan 0.1000 -0.0001
## 440 0.1850 nan 0.1000 -0.0003
## 460 0.1746 nan 0.1000 -0.0002
## 480 0.1642 nan 0.1000 -0.0003
## 500 0.1539 nan 0.1000 -0.0002
##
## - Fold08: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3473 nan 0.1000 0.0083
## 2 1.3326 nan 0.1000 0.0062
## 3 1.3181 nan 0.1000 0.0068
## 4 1.3061 nan 0.1000 0.0055
## 5 1.2970 nan 0.1000 0.0042
## 6 1.2883 nan 0.1000 0.0037
## 7 1.2800 nan 0.1000 0.0040
## 8 1.2730 nan 0.1000 0.0028
## 9 1.2674 nan 0.1000 0.0020
## 10 1.2612 nan 0.1000 0.0026
## 20 1.2186 nan 0.1000 0.0013
## 40 1.1763 nan 0.1000 0.0002
## 60 1.1498 nan 0.1000 0.0005
## 80 1.1300 nan 0.1000 0.0001
## 100 1.1161 nan 0.1000 -0.0004
## 120 1.1030 nan 0.1000 -0.0002
## 140 1.0934 nan 0.1000 -0.0005
## 160 1.0843 nan 0.1000 -0.0002
## 180 1.0753 nan 0.1000 0.0000
## 200 1.0674 nan 0.1000 -0.0003
## 220 1.0587 nan 0.1000 0.0001
## 240 1.0511 nan 0.1000 -0.0002
## 260 1.0448 nan 0.1000 -0.0005
## 280 1.0379 nan 0.1000 -0.0002
## 300 1.0317 nan 0.1000 -0.0002
## 320 1.0258 nan 0.1000 -0.0002
## 340 1.0196 nan 0.1000 -0.0003
## 360 1.0148 nan 0.1000 -0.0003
## 380 1.0079 nan 0.1000 -0.0002
## 400 1.0034 nan 0.1000 -0.0002
## 420 0.9983 nan 0.1000 -0.0002
## 440 0.9939 nan 0.1000 -0.0003
## 460 0.9884 nan 0.1000 -0.0002
## 480 0.9833 nan 0.1000 0.0000
## 500 0.9785 nan 0.1000 -0.0000
##
## - Fold09: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3399 nan 0.1000 0.0120
## 2 1.3211 nan 0.1000 0.0093
## 3 1.3045 nan 0.1000 0.0083
## 4 1.2915 nan 0.1000 0.0053
## 5 1.2791 nan 0.1000 0.0056
## 6 1.2686 nan 0.1000 0.0046
## 7 1.2584 nan 0.1000 0.0042
## 8 1.2509 nan 0.1000 0.0034
## 9 1.2422 nan 0.1000 0.0036
## 10 1.2344 nan 0.1000 0.0032
## 20 1.1713 nan 0.1000 0.0022
## 40 1.0947 nan 0.1000 0.0005
## 60 1.0413 nan 0.1000 0.0004
## 80 1.0042 nan 0.1000 0.0013
## 100 0.9752 nan 0.1000 0.0009
## 120 0.9435 nan 0.1000 0.0006
## 140 0.9156 nan 0.1000 0.0002
## 160 0.8977 nan 0.1000 0.0000
## 180 0.8814 nan 0.1000 -0.0000
## 200 0.8623 nan 0.1000 -0.0002
## 220 0.8468 nan 0.1000 0.0006
## 240 0.8332 nan 0.1000 -0.0002
## 260 0.8177 nan 0.1000 0.0001
## 280 0.8059 nan 0.1000 -0.0005
## 300 0.7934 nan 0.1000 -0.0004
## 320 0.7794 nan 0.1000 -0.0004
## 340 0.7660 nan 0.1000 -0.0002
## 360 0.7549 nan 0.1000 0.0004
## 380 0.7428 nan 0.1000 -0.0004
## 400 0.7326 nan 0.1000 -0.0004
## 420 0.7241 nan 0.1000 -0.0004
## 440 0.7158 nan 0.1000 -0.0003
## 460 0.7072 nan 0.1000 -0.0004
## 480 0.6993 nan 0.1000 -0.0003
## 500 0.6907 nan 0.1000 -0.0001
##
## - Fold09: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3388 nan 0.1000 0.0128
## 2 1.3118 nan 0.1000 0.0126
## 3 1.2939 nan 0.1000 0.0084
## 4 1.2770 nan 0.1000 0.0074
## 5 1.2633 nan 0.1000 0.0062
## 6 1.2483 nan 0.1000 0.0073
## 7 1.2360 nan 0.1000 0.0053
## 8 1.2232 nan 0.1000 0.0063
## 9 1.2130 nan 0.1000 0.0040
## 10 1.2026 nan 0.1000 0.0046
## 20 1.1252 nan 0.1000 0.0021
## 40 1.0314 nan 0.1000 0.0025
## 60 0.9765 nan 0.1000 0.0008
## 80 0.9306 nan 0.1000 0.0004
## 100 0.8942 nan 0.1000 -0.0003
## 120 0.8640 nan 0.1000 -0.0001
## 140 0.8386 nan 0.1000 0.0000
## 160 0.8156 nan 0.1000 0.0001
## 180 0.7945 nan 0.1000 -0.0005
## 200 0.7727 nan 0.1000 -0.0001
## 220 0.7524 nan 0.1000 -0.0004
## 240 0.7361 nan 0.1000 -0.0004
## 260 0.7200 nan 0.1000 -0.0009
## 280 0.7030 nan 0.1000 -0.0002
## 300 0.6893 nan 0.1000 -0.0006
## 320 0.6747 nan 0.1000 -0.0001
## 340 0.6596 nan 0.1000 -0.0004
## 360 0.6473 nan 0.1000 -0.0002
## 380 0.6357 nan 0.1000 -0.0003
## 400 0.6251 nan 0.1000 -0.0001
## 420 0.6135 nan 0.1000 -0.0002
## 440 0.6025 nan 0.1000 -0.0003
## 460 0.5936 nan 0.1000 -0.0001
## 480 0.5838 nan 0.1000 -0.0002
## 500 0.5741 nan 0.1000 -0.0003
##
## - Fold09: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3311 nan 0.1000 0.0133
## 2 1.3050 nan 0.1000 0.0118
## 3 1.2812 nan 0.1000 0.0104
## 4 1.2638 nan 0.1000 0.0076
## 5 1.2482 nan 0.1000 0.0069
## 6 1.2321 nan 0.1000 0.0072
## 7 1.2199 nan 0.1000 0.0055
## 8 1.2042 nan 0.1000 0.0069
## 9 1.1916 nan 0.1000 0.0052
## 10 1.1808 nan 0.1000 0.0038
## 20 1.0891 nan 0.1000 0.0017
## 40 0.9842 nan 0.1000 0.0020
## 60 0.9084 nan 0.1000 0.0012
## 80 0.8555 nan 0.1000 0.0006
## 100 0.8203 nan 0.1000 -0.0001
## 120 0.7824 nan 0.1000 0.0007
## 140 0.7531 nan 0.1000 -0.0003
## 160 0.7212 nan 0.1000 -0.0004
## 180 0.6956 nan 0.1000 -0.0003
## 200 0.6739 nan 0.1000 0.0004
## 220 0.6545 nan 0.1000 -0.0005
## 240 0.6335 nan 0.1000 -0.0001
## 260 0.6155 nan 0.1000 -0.0002
## 280 0.5988 nan 0.1000 -0.0003
## 300 0.5820 nan 0.1000 -0.0002
## 320 0.5680 nan 0.1000 -0.0004
## 340 0.5526 nan 0.1000 -0.0003
## 360 0.5359 nan 0.1000 -0.0003
## 380 0.5212 nan 0.1000 -0.0001
## 400 0.5078 nan 0.1000 -0.0006
## 420 0.4964 nan 0.1000 -0.0005
## 440 0.4857 nan 0.1000 -0.0002
## 460 0.4720 nan 0.1000 -0.0003
## 480 0.4613 nan 0.1000 -0.0005
## 500 0.4499 nan 0.1000 -0.0004
##
## - Fold09: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3345 nan 0.1000 0.0143
## 2 1.3042 nan 0.1000 0.0139
## 3 1.2791 nan 0.1000 0.0103
## 4 1.2582 nan 0.1000 0.0091
## 5 1.2392 nan 0.1000 0.0076
## 6 1.2213 nan 0.1000 0.0073
## 7 1.2045 nan 0.1000 0.0078
## 8 1.1913 nan 0.1000 0.0055
## 9 1.1784 nan 0.1000 0.0052
## 10 1.1653 nan 0.1000 0.0056
## 20 1.0714 nan 0.1000 0.0042
## 40 0.9441 nan 0.1000 0.0020
## 60 0.8694 nan 0.1000 0.0000
## 80 0.8165 nan 0.1000 0.0000
## 100 0.7685 nan 0.1000 -0.0001
## 120 0.7264 nan 0.1000 -0.0001
## 140 0.6904 nan 0.1000 -0.0005
## 160 0.6639 nan 0.1000 0.0004
## 180 0.6351 nan 0.1000 -0.0001
## 200 0.6099 nan 0.1000 0.0006
## 220 0.5873 nan 0.1000 -0.0005
## 240 0.5643 nan 0.1000 0.0001
## 260 0.5458 nan 0.1000 -0.0005
## 280 0.5287 nan 0.1000 -0.0004
## 300 0.5092 nan 0.1000 0.0004
## 320 0.4921 nan 0.1000 -0.0003
## 340 0.4744 nan 0.1000 0.0001
## 360 0.4574 nan 0.1000 -0.0003
## 380 0.4441 nan 0.1000 -0.0001
## 400 0.4291 nan 0.1000 -0.0001
## 420 0.4148 nan 0.1000 -0.0003
## 440 0.4031 nan 0.1000 -0.0004
## 460 0.3916 nan 0.1000 -0.0002
## 480 0.3803 nan 0.1000 -0.0003
## 500 0.3697 nan 0.1000 -0.0004
##
## - Fold09: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3310 nan 0.1000 0.0155
## 2 1.3012 nan 0.1000 0.0132
## 3 1.2771 nan 0.1000 0.0107
## 4 1.2546 nan 0.1000 0.0102
## 5 1.2340 nan 0.1000 0.0085
## 6 1.2165 nan 0.1000 0.0075
## 7 1.1999 nan 0.1000 0.0075
## 8 1.1844 nan 0.1000 0.0067
## 9 1.1691 nan 0.1000 0.0069
## 10 1.1549 nan 0.1000 0.0062
## 20 1.0623 nan 0.1000 0.0029
## 40 0.9460 nan 0.1000 0.0010
## 60 0.8647 nan 0.1000 -0.0000
## 80 0.7998 nan 0.1000 -0.0001
## 100 0.7460 nan 0.1000 -0.0002
## 120 0.7076 nan 0.1000 -0.0003
## 140 0.6715 nan 0.1000 -0.0000
## 160 0.6356 nan 0.1000 0.0003
## 180 0.6025 nan 0.1000 -0.0004
## 200 0.5710 nan 0.1000 0.0002
## 220 0.5450 nan 0.1000 -0.0004
## 240 0.5241 nan 0.1000 -0.0008
## 260 0.5005 nan 0.1000 -0.0005
## 280 0.4794 nan 0.1000 -0.0002
## 300 0.4607 nan 0.1000 -0.0001
## 320 0.4447 nan 0.1000 -0.0003
## 340 0.4282 nan 0.1000 -0.0002
## 360 0.4116 nan 0.1000 -0.0000
## 380 0.3968 nan 0.1000 -0.0007
## 400 0.3835 nan 0.1000 -0.0004
## 420 0.3677 nan 0.1000 -0.0003
## 440 0.3543 nan 0.1000 -0.0002
## 460 0.3410 nan 0.1000 -0.0003
## 480 0.3267 nan 0.1000 -0.0002
## 500 0.3155 nan 0.1000 -0.0001
##
## - Fold09: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3302 nan 0.1000 0.0163
## 2 1.2978 nan 0.1000 0.0142
## 3 1.2685 nan 0.1000 0.0132
## 4 1.2460 nan 0.1000 0.0098
## 5 1.2259 nan 0.1000 0.0076
## 6 1.2071 nan 0.1000 0.0073
## 7 1.1892 nan 0.1000 0.0075
## 8 1.1709 nan 0.1000 0.0078
## 9 1.1555 nan 0.1000 0.0057
## 10 1.1411 nan 0.1000 0.0057
## 20 1.0279 nan 0.1000 0.0029
## 40 0.8925 nan 0.1000 0.0058
## 60 0.8030 nan 0.1000 0.0010
## 80 0.7395 nan 0.1000 -0.0001
## 100 0.6891 nan 0.1000 0.0002
## 120 0.6409 nan 0.1000 -0.0002
## 140 0.6036 nan 0.1000 -0.0004
## 160 0.5701 nan 0.1000 -0.0002
## 180 0.5421 nan 0.1000 -0.0012
## 200 0.5134 nan 0.1000 -0.0003
## 220 0.4872 nan 0.1000 0.0001
## 240 0.4632 nan 0.1000 0.0001
## 260 0.4406 nan 0.1000 0.0002
## 280 0.4208 nan 0.1000 -0.0005
## 300 0.4012 nan 0.1000 -0.0004
## 320 0.3830 nan 0.1000 -0.0000
## 340 0.3660 nan 0.1000 -0.0003
## 360 0.3497 nan 0.1000 -0.0001
## 380 0.3337 nan 0.1000 -0.0005
## 400 0.3202 nan 0.1000 -0.0002
## 420 0.3077 nan 0.1000 -0.0001
## 440 0.2952 nan 0.1000 -0.0004
## 460 0.2826 nan 0.1000 -0.0001
## 480 0.2715 nan 0.1000 -0.0003
## 500 0.2604 nan 0.1000 -0.0001
##
## - Fold09: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3295 nan 0.1000 0.0167
## 2 1.2970 nan 0.1000 0.0139
## 3 1.2681 nan 0.1000 0.0133
## 4 1.2402 nan 0.1000 0.0119
## 5 1.2152 nan 0.1000 0.0097
## 6 1.1942 nan 0.1000 0.0084
## 7 1.1763 nan 0.1000 0.0067
## 8 1.1620 nan 0.1000 0.0045
## 9 1.1477 nan 0.1000 0.0056
## 10 1.1331 nan 0.1000 0.0062
## 20 1.0191 nan 0.1000 0.0037
## 40 0.8899 nan 0.1000 0.0009
## 60 0.7997 nan 0.1000 0.0000
## 80 0.7285 nan 0.1000 0.0000
## 100 0.6714 nan 0.1000 -0.0004
## 120 0.6230 nan 0.1000 -0.0001
## 140 0.5823 nan 0.1000 0.0005
## 160 0.5453 nan 0.1000 0.0000
## 180 0.5117 nan 0.1000 -0.0001
## 200 0.4836 nan 0.1000 -0.0002
## 220 0.4565 nan 0.1000 -0.0006
## 240 0.4324 nan 0.1000 -0.0004
## 260 0.4079 nan 0.1000 -0.0002
## 280 0.3847 nan 0.1000 -0.0006
## 300 0.3650 nan 0.1000 -0.0003
## 320 0.3447 nan 0.1000 -0.0001
## 340 0.3262 nan 0.1000 -0.0004
## 360 0.3106 nan 0.1000 -0.0004
## 380 0.2965 nan 0.1000 -0.0004
## 400 0.2832 nan 0.1000 -0.0002
## 420 0.2689 nan 0.1000 -0.0002
## 440 0.2552 nan 0.1000 -0.0003
## 460 0.2445 nan 0.1000 -0.0002
## 480 0.2329 nan 0.1000 -0.0002
## 500 0.2217 nan 0.1000 -0.0002
##
## - Fold09: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3249 nan 0.1000 0.0189
## 2 1.2897 nan 0.1000 0.0154
## 3 1.2580 nan 0.1000 0.0143
## 4 1.2317 nan 0.1000 0.0110
## 5 1.2063 nan 0.1000 0.0102
## 6 1.1855 nan 0.1000 0.0081
## 7 1.1640 nan 0.1000 0.0087
## 8 1.1447 nan 0.1000 0.0081
## 9 1.1298 nan 0.1000 0.0056
## 10 1.1132 nan 0.1000 0.0069
## 20 0.9968 nan 0.1000 0.0025
## 40 0.8557 nan 0.1000 0.0015
## 60 0.7634 nan 0.1000 -0.0002
## 80 0.7024 nan 0.1000 -0.0005
## 100 0.6439 nan 0.1000 0.0003
## 120 0.5933 nan 0.1000 -0.0005
## 140 0.5506 nan 0.1000 -0.0002
## 160 0.5101 nan 0.1000 0.0004
## 180 0.4776 nan 0.1000 -0.0001
## 200 0.4434 nan 0.1000 0.0004
## 220 0.4144 nan 0.1000 -0.0001
## 240 0.3911 nan 0.1000 -0.0003
## 260 0.3693 nan 0.1000 -0.0004
## 280 0.3494 nan 0.1000 -0.0001
## 300 0.3303 nan 0.1000 -0.0003
## 320 0.3115 nan 0.1000 -0.0001
## 340 0.2940 nan 0.1000 -0.0003
## 360 0.2783 nan 0.1000 -0.0003
## 380 0.2631 nan 0.1000 -0.0002
## 400 0.2503 nan 0.1000 -0.0003
## 420 0.2344 nan 0.1000 -0.0003
## 440 0.2226 nan 0.1000 -0.0003
## 460 0.2107 nan 0.1000 -0.0003
## 480 0.2000 nan 0.1000 -0.0001
## 500 0.1891 nan 0.1000 -0.0002
##
## - Fold09: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold09: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3246 nan 0.1000 0.0192
## 2 1.2902 nan 0.1000 0.0145
## 3 1.2610 nan 0.1000 0.0115
## 4 1.2343 nan 0.1000 0.0112
## 5 1.2122 nan 0.1000 0.0085
## 6 1.1922 nan 0.1000 0.0080
## 7 1.1733 nan 0.1000 0.0071
## 8 1.1548 nan 0.1000 0.0054
## 9 1.1381 nan 0.1000 0.0068
## 10 1.1177 nan 0.1000 0.0089
## 20 0.9939 nan 0.1000 0.0052
## 40 0.8580 nan 0.1000 0.0007
## 60 0.7606 nan 0.1000 0.0013
## 80 0.6933 nan 0.1000 0.0002
## 100 0.6311 nan 0.1000 -0.0001
## 120 0.5813 nan 0.1000 -0.0006
## 140 0.5384 nan 0.1000 -0.0002
## 160 0.5001 nan 0.1000 -0.0009
## 180 0.4681 nan 0.1000 -0.0003
## 200 0.4375 nan 0.1000 -0.0008
## 220 0.4088 nan 0.1000 -0.0001
## 240 0.3831 nan 0.1000 -0.0007
## 260 0.3598 nan 0.1000 0.0000
## 280 0.3377 nan 0.1000 -0.0001
## 300 0.3186 nan 0.1000 -0.0004
## 320 0.2991 nan 0.1000 -0.0001
## 340 0.2803 nan 0.1000 -0.0004
## 360 0.2627 nan 0.1000 -0.0001
## 380 0.2474 nan 0.1000 -0.0003
## 400 0.2336 nan 0.1000 -0.0002
## 420 0.2198 nan 0.1000 -0.0002
## 440 0.2069 nan 0.1000 -0.0004
## 460 0.1965 nan 0.1000 -0.0003
## 480 0.1861 nan 0.1000 -0.0002
## 500 0.1761 nan 0.1000 -0.0002
##
## - Fold09: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3484 nan 0.1000 0.0088
## 2 1.3334 nan 0.1000 0.0076
## 3 1.3193 nan 0.1000 0.0066
## 4 1.3079 nan 0.1000 0.0052
## 5 1.2981 nan 0.1000 0.0043
## 6 1.2880 nan 0.1000 0.0043
## 7 1.2803 nan 0.1000 0.0037
## 8 1.2703 nan 0.1000 0.0045
## 9 1.2631 nan 0.1000 0.0035
## 10 1.2563 nan 0.1000 0.0030
## 20 1.2169 nan 0.1000 0.0009
## 40 1.1728 nan 0.1000 0.0003
## 60 1.1428 nan 0.1000 0.0001
## 80 1.1222 nan 0.1000 -0.0000
## 100 1.1069 nan 0.1000 0.0001
## 120 1.0918 nan 0.1000 -0.0001
## 140 1.0805 nan 0.1000 -0.0002
## 160 1.0709 nan 0.1000 -0.0002
## 180 1.0608 nan 0.1000 -0.0003
## 200 1.0533 nan 0.1000 -0.0001
## 220 1.0469 nan 0.1000 -0.0001
## 240 1.0407 nan 0.1000 -0.0003
## 260 1.0331 nan 0.1000 -0.0002
## 280 1.0258 nan 0.1000 -0.0000
## 300 1.0200 nan 0.1000 -0.0001
## 320 1.0145 nan 0.1000 -0.0001
## 340 1.0081 nan 0.1000 0.0001
## 360 1.0034 nan 0.1000 -0.0005
## 380 0.9977 nan 0.1000 -0.0004
## 400 0.9932 nan 0.1000 -0.0001
## 420 0.9889 nan 0.1000 -0.0002
## 440 0.9836 nan 0.1000 -0.0002
## 460 0.9787 nan 0.1000 -0.0003
## 480 0.9740 nan 0.1000 -0.0003
## 500 0.9694 nan 0.1000 -0.0001
##
## - Fold10: shrinkage=0.1, interaction.depth= 1, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3439 nan 0.1000 0.0105
## 2 1.3259 nan 0.1000 0.0087
## 3 1.3099 nan 0.1000 0.0070
## 4 1.2962 nan 0.1000 0.0057
## 5 1.2842 nan 0.1000 0.0054
## 6 1.2750 nan 0.1000 0.0041
## 7 1.2637 nan 0.1000 0.0047
## 8 1.2537 nan 0.1000 0.0045
## 9 1.2450 nan 0.1000 0.0036
## 10 1.2374 nan 0.1000 0.0029
## 20 1.1778 nan 0.1000 0.0013
## 40 1.1037 nan 0.1000 0.0009
## 60 1.0533 nan 0.1000 0.0002
## 80 1.0186 nan 0.1000 -0.0002
## 100 0.9811 nan 0.1000 0.0005
## 120 0.9524 nan 0.1000 0.0002
## 140 0.9314 nan 0.1000 -0.0003
## 160 0.9076 nan 0.1000 0.0005
## 180 0.8925 nan 0.1000 -0.0001
## 200 0.8744 nan 0.1000 -0.0001
## 220 0.8552 nan 0.1000 -0.0001
## 240 0.8410 nan 0.1000 -0.0003
## 260 0.8246 nan 0.1000 -0.0003
## 280 0.8114 nan 0.1000 -0.0003
## 300 0.7987 nan 0.1000 0.0001
## 320 0.7852 nan 0.1000 -0.0004
## 340 0.7758 nan 0.1000 -0.0002
## 360 0.7644 nan 0.1000 -0.0003
## 380 0.7541 nan 0.1000 -0.0002
## 400 0.7443 nan 0.1000 -0.0001
## 420 0.7351 nan 0.1000 -0.0003
## 440 0.7251 nan 0.1000 -0.0001
## 460 0.7154 nan 0.1000 -0.0003
## 480 0.7067 nan 0.1000 -0.0000
## 500 0.6993 nan 0.1000 -0.0002
##
## - Fold10: shrinkage=0.1, interaction.depth= 2, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3427 nan 0.1000 0.0112
## 2 1.3221 nan 0.1000 0.0105
## 3 1.3027 nan 0.1000 0.0088
## 4 1.2838 nan 0.1000 0.0080
## 5 1.2703 nan 0.1000 0.0051
## 6 1.2549 nan 0.1000 0.0070
## 7 1.2417 nan 0.1000 0.0060
## 8 1.2294 nan 0.1000 0.0049
## 9 1.2198 nan 0.1000 0.0039
## 10 1.2087 nan 0.1000 0.0045
## 20 1.1420 nan 0.1000 0.0016
## 40 1.0581 nan 0.1000 0.0014
## 60 0.9970 nan 0.1000 0.0001
## 80 0.9561 nan 0.1000 0.0002
## 100 0.9131 nan 0.1000 -0.0003
## 120 0.8792 nan 0.1000 0.0004
## 140 0.8494 nan 0.1000 0.0003
## 160 0.8206 nan 0.1000 -0.0005
## 180 0.8004 nan 0.1000 0.0005
## 200 0.7838 nan 0.1000 -0.0007
## 220 0.7625 nan 0.1000 -0.0003
## 240 0.7446 nan 0.1000 0.0002
## 260 0.7284 nan 0.1000 -0.0006
## 280 0.7127 nan 0.1000 -0.0006
## 300 0.6970 nan 0.1000 -0.0005
## 320 0.6824 nan 0.1000 -0.0005
## 340 0.6710 nan 0.1000 -0.0004
## 360 0.6575 nan 0.1000 -0.0004
## 380 0.6459 nan 0.1000 -0.0004
## 400 0.6329 nan 0.1000 -0.0002
## 420 0.6205 nan 0.1000 0.0002
## 440 0.6085 nan 0.1000 0.0002
## 460 0.5989 nan 0.1000 -0.0002
## 480 0.5877 nan 0.1000 -0.0002
## 500 0.5773 nan 0.1000 -0.0003
##
## - Fold10: shrinkage=0.1, interaction.depth= 3, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3371 nan 0.1000 0.0132
## 2 1.3128 nan 0.1000 0.0115
## 3 1.2913 nan 0.1000 0.0098
## 4 1.2738 nan 0.1000 0.0076
## 5 1.2588 nan 0.1000 0.0059
## 6 1.2437 nan 0.1000 0.0072
## 7 1.2308 nan 0.1000 0.0056
## 8 1.2180 nan 0.1000 0.0042
## 9 1.2046 nan 0.1000 0.0056
## 10 1.1937 nan 0.1000 0.0048
## 20 1.1077 nan 0.1000 0.0032
## 40 1.0113 nan 0.1000 0.0007
## 60 0.9396 nan 0.1000 0.0007
## 80 0.8894 nan 0.1000 0.0003
## 100 0.8452 nan 0.1000 0.0010
## 120 0.8077 nan 0.1000 0.0009
## 140 0.7781 nan 0.1000 -0.0000
## 160 0.7477 nan 0.1000 0.0000
## 180 0.7167 nan 0.1000 0.0004
## 200 0.6913 nan 0.1000 -0.0001
## 220 0.6690 nan 0.1000 0.0000
## 240 0.6505 nan 0.1000 -0.0002
## 260 0.6307 nan 0.1000 -0.0004
## 280 0.6118 nan 0.1000 0.0000
## 300 0.5945 nan 0.1000 -0.0001
## 320 0.5790 nan 0.1000 -0.0003
## 340 0.5662 nan 0.1000 -0.0001
## 360 0.5528 nan 0.1000 -0.0002
## 380 0.5398 nan 0.1000 -0.0002
## 400 0.5249 nan 0.1000 0.0003
## 420 0.5127 nan 0.1000 -0.0005
## 440 0.4989 nan 0.1000 -0.0005
## 460 0.4875 nan 0.1000 -0.0001
## 480 0.4759 nan 0.1000 -0.0003
## 500 0.4656 nan 0.1000 -0.0000
##
## - Fold10: shrinkage=0.1, interaction.depth= 4, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3363 nan 0.1000 0.0142
## 2 1.3119 nan 0.1000 0.0111
## 3 1.2893 nan 0.1000 0.0093
## 4 1.2664 nan 0.1000 0.0100
## 5 1.2473 nan 0.1000 0.0081
## 6 1.2313 nan 0.1000 0.0065
## 7 1.2156 nan 0.1000 0.0067
## 8 1.2018 nan 0.1000 0.0053
## 9 1.1893 nan 0.1000 0.0055
## 10 1.1754 nan 0.1000 0.0051
## 20 1.0849 nan 0.1000 0.0024
## 40 0.9635 nan 0.1000 0.0012
## 60 0.8911 nan 0.1000 -0.0003
## 80 0.8362 nan 0.1000 0.0006
## 100 0.7839 nan 0.1000 0.0004
## 120 0.7421 nan 0.1000 -0.0002
## 140 0.7101 nan 0.1000 0.0002
## 160 0.6785 nan 0.1000 0.0006
## 180 0.6510 nan 0.1000 -0.0002
## 200 0.6226 nan 0.1000 -0.0005
## 220 0.5990 nan 0.1000 -0.0005
## 240 0.5774 nan 0.1000 -0.0001
## 260 0.5563 nan 0.1000 -0.0004
## 280 0.5369 nan 0.1000 -0.0002
## 300 0.5198 nan 0.1000 -0.0003
## 320 0.5045 nan 0.1000 -0.0005
## 340 0.4895 nan 0.1000 -0.0006
## 360 0.4756 nan 0.1000 -0.0003
## 380 0.4607 nan 0.1000 -0.0005
## 400 0.4469 nan 0.1000 -0.0006
## 420 0.4347 nan 0.1000 -0.0003
## 440 0.4202 nan 0.1000 0.0000
## 460 0.4056 nan 0.1000 -0.0005
## 480 0.3932 nan 0.1000 -0.0006
## 500 0.3827 nan 0.1000 -0.0003
##
## - Fold10: shrinkage=0.1, interaction.depth= 5, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3313 nan 0.1000 0.0158
## 2 1.3022 nan 0.1000 0.0118
## 3 1.2767 nan 0.1000 0.0118
## 4 1.2525 nan 0.1000 0.0101
## 5 1.2342 nan 0.1000 0.0070
## 6 1.2168 nan 0.1000 0.0068
## 7 1.1999 nan 0.1000 0.0065
## 8 1.1859 nan 0.1000 0.0053
## 9 1.1702 nan 0.1000 0.0063
## 10 1.1586 nan 0.1000 0.0046
## 20 1.0585 nan 0.1000 0.0018
## 40 0.9435 nan 0.1000 0.0009
## 60 0.8608 nan 0.1000 -0.0002
## 80 0.8029 nan 0.1000 -0.0002
## 100 0.7554 nan 0.1000 0.0001
## 120 0.7137 nan 0.1000 0.0000
## 140 0.6739 nan 0.1000 0.0001
## 160 0.6404 nan 0.1000 -0.0001
## 180 0.6130 nan 0.1000 -0.0003
## 200 0.5819 nan 0.1000 -0.0001
## 220 0.5525 nan 0.1000 -0.0002
## 240 0.5306 nan 0.1000 -0.0005
## 260 0.5099 nan 0.1000 0.0001
## 280 0.4915 nan 0.1000 -0.0002
## 300 0.4693 nan 0.1000 -0.0003
## 320 0.4508 nan 0.1000 -0.0001
## 340 0.4344 nan 0.1000 -0.0001
## 360 0.4179 nan 0.1000 -0.0003
## 380 0.4040 nan 0.1000 -0.0002
## 400 0.3893 nan 0.1000 -0.0004
## 420 0.3750 nan 0.1000 -0.0001
## 440 0.3629 nan 0.1000 -0.0003
## 460 0.3504 nan 0.1000 -0.0003
## 480 0.3381 nan 0.1000 -0.0004
## 500 0.3249 nan 0.1000 -0.0003
##
## - Fold10: shrinkage=0.1, interaction.depth= 6, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3318 nan 0.1000 0.0162
## 2 1.3037 nan 0.1000 0.0124
## 3 1.2761 nan 0.1000 0.0122
## 4 1.2539 nan 0.1000 0.0084
## 5 1.2373 nan 0.1000 0.0057
## 6 1.2206 nan 0.1000 0.0066
## 7 1.2051 nan 0.1000 0.0062
## 8 1.1893 nan 0.1000 0.0060
## 9 1.1756 nan 0.1000 0.0046
## 10 1.1631 nan 0.1000 0.0047
## 20 1.0597 nan 0.1000 0.0033
## 40 0.9335 nan 0.1000 0.0023
## 60 0.8533 nan 0.1000 0.0005
## 80 0.7835 nan 0.1000 0.0004
## 100 0.7272 nan 0.1000 0.0006
## 120 0.6827 nan 0.1000 0.0002
## 140 0.6408 nan 0.1000 -0.0002
## 160 0.6028 nan 0.1000 -0.0005
## 180 0.5716 nan 0.1000 -0.0001
## 200 0.5396 nan 0.1000 -0.0003
## 220 0.5134 nan 0.1000 -0.0003
## 240 0.4893 nan 0.1000 -0.0004
## 260 0.4650 nan 0.1000 -0.0002
## 280 0.4446 nan 0.1000 -0.0005
## 300 0.4245 nan 0.1000 -0.0005
## 320 0.4063 nan 0.1000 -0.0004
## 340 0.3881 nan 0.1000 -0.0002
## 360 0.3703 nan 0.1000 -0.0004
## 380 0.3531 nan 0.1000 -0.0005
## 400 0.3390 nan 0.1000 0.0000
## 420 0.3254 nan 0.1000 -0.0003
## 440 0.3110 nan 0.1000 -0.0000
## 460 0.2985 nan 0.1000 -0.0003
## 480 0.2863 nan 0.1000 -0.0003
## 500 0.2746 nan 0.1000 -0.0001
##
## - Fold10: shrinkage=0.1, interaction.depth= 7, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3281 nan 0.1000 0.0182
## 2 1.2964 nan 0.1000 0.0150
## 3 1.2667 nan 0.1000 0.0130
## 4 1.2413 nan 0.1000 0.0120
## 5 1.2197 nan 0.1000 0.0082
## 6 1.1998 nan 0.1000 0.0072
## 7 1.1834 nan 0.1000 0.0059
## 8 1.1660 nan 0.1000 0.0067
## 9 1.1522 nan 0.1000 0.0039
## 10 1.1359 nan 0.1000 0.0070
## 20 1.0175 nan 0.1000 0.0033
## 40 0.8766 nan 0.1000 0.0004
## 60 0.7901 nan 0.1000 0.0002
## 80 0.7242 nan 0.1000 0.0000
## 100 0.6682 nan 0.1000 0.0003
## 120 0.6212 nan 0.1000 -0.0007
## 140 0.5768 nan 0.1000 0.0005
## 160 0.5408 nan 0.1000 0.0000
## 180 0.5102 nan 0.1000 -0.0004
## 200 0.4805 nan 0.1000 -0.0003
## 220 0.4528 nan 0.1000 -0.0003
## 240 0.4259 nan 0.1000 -0.0003
## 260 0.4033 nan 0.1000 -0.0002
## 280 0.3829 nan 0.1000 -0.0001
## 300 0.3636 nan 0.1000 -0.0003
## 320 0.3432 nan 0.1000 -0.0005
## 340 0.3255 nan 0.1000 -0.0005
## 360 0.3079 nan 0.1000 0.0001
## 380 0.2933 nan 0.1000 -0.0005
## 400 0.2778 nan 0.1000 -0.0002
## 420 0.2644 nan 0.1000 -0.0004
## 440 0.2516 nan 0.1000 -0.0001
## 460 0.2401 nan 0.1000 -0.0003
## 480 0.2294 nan 0.1000 -0.0003
## 500 0.2185 nan 0.1000 -0.0001
##
## - Fold10: shrinkage=0.1, interaction.depth= 8, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3294 nan 0.1000 0.0170
## 2 1.2977 nan 0.1000 0.0132
## 3 1.2687 nan 0.1000 0.0133
## 4 1.2420 nan 0.1000 0.0110
## 5 1.2177 nan 0.1000 0.0108
## 6 1.1964 nan 0.1000 0.0085
## 7 1.1770 nan 0.1000 0.0075
## 8 1.1589 nan 0.1000 0.0079
## 9 1.1416 nan 0.1000 0.0064
## 10 1.1285 nan 0.1000 0.0043
## 20 1.0049 nan 0.1000 0.0047
## 40 0.8575 nan 0.1000 0.0007
## 60 0.7638 nan 0.1000 0.0002
## 80 0.6983 nan 0.1000 -0.0001
## 100 0.6433 nan 0.1000 0.0003
## 120 0.5916 nan 0.1000 0.0004
## 140 0.5518 nan 0.1000 0.0002
## 160 0.5130 nan 0.1000 -0.0006
## 180 0.4809 nan 0.1000 -0.0003
## 200 0.4465 nan 0.1000 0.0001
## 220 0.4204 nan 0.1000 -0.0001
## 240 0.3939 nan 0.1000 -0.0003
## 260 0.3683 nan 0.1000 -0.0004
## 280 0.3468 nan 0.1000 -0.0003
## 300 0.3261 nan 0.1000 -0.0002
## 320 0.3090 nan 0.1000 -0.0001
## 340 0.2928 nan 0.1000 -0.0001
## 360 0.2760 nan 0.1000 -0.0001
## 380 0.2600 nan 0.1000 -0.0002
## 400 0.2448 nan 0.1000 -0.0004
## 420 0.2313 nan 0.1000 -0.0003
## 440 0.2182 nan 0.1000 -0.0003
## 460 0.2068 nan 0.1000 -0.0002
## 480 0.1959 nan 0.1000 -0.0002
## 500 0.1854 nan 0.1000 -0.0003
##
## - Fold10: shrinkage=0.1, interaction.depth= 9, n.minobsinnode=10, n.trees=500
## + Fold10: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3281 nan 0.1000 0.0172
## 2 1.2938 nan 0.1000 0.0156
## 3 1.2655 nan 0.1000 0.0106
## 4 1.2416 nan 0.1000 0.0096
## 5 1.2174 nan 0.1000 0.0094
## 6 1.1989 nan 0.1000 0.0062
## 7 1.1802 nan 0.1000 0.0073
## 8 1.1616 nan 0.1000 0.0066
## 9 1.1444 nan 0.1000 0.0068
## 10 1.1289 nan 0.1000 0.0052
## 20 1.0104 nan 0.1000 0.0045
## 40 0.8552 nan 0.1000 0.0008
## 60 0.7547 nan 0.1000 0.0004
## 80 0.6794 nan 0.1000 -0.0007
## 100 0.6205 nan 0.1000 0.0001
## 120 0.5691 nan 0.1000 -0.0001
## 140 0.5248 nan 0.1000 -0.0001
## 160 0.4855 nan 0.1000 -0.0002
## 180 0.4520 nan 0.1000 -0.0002
## 200 0.4210 nan 0.1000 -0.0005
## 220 0.3914 nan 0.1000 -0.0003
## 240 0.3667 nan 0.1000 -0.0007
## 260 0.3432 nan 0.1000 -0.0002
## 280 0.3214 nan 0.1000 -0.0004
## 300 0.3021 nan 0.1000 -0.0001
## 320 0.2828 nan 0.1000 -0.0003
## 340 0.2651 nan 0.1000 -0.0002
## 360 0.2490 nan 0.1000 -0.0005
## 380 0.2333 nan 0.1000 -0.0002
## 400 0.2209 nan 0.1000 -0.0001
## 420 0.2088 nan 0.1000 -0.0003
## 440 0.1963 nan 0.1000 -0.0003
## 460 0.1865 nan 0.1000 -0.0003
## 480 0.1757 nan 0.1000 -0.0001
## 500 0.1659 nan 0.1000 -0.0002
##
## - Fold10: shrinkage=0.1, interaction.depth=10, n.minobsinnode=10, n.trees=500
## Aggregating results
## Selecting tuning parameters
## Fitting n.trees = 50, interaction.depth = 1, shrinkage = 0.1, n.minobsinnode = 10 on full training set
## Iter TrainDeviance ValidDeviance StepSize Improve
## 1 1.3522 nan 0.1000 0.0068
## 2 1.3398 nan 0.1000 0.0056
## 3 1.3302 nan 0.1000 0.0049
## 4 1.3198 nan 0.1000 0.0050
## 5 1.3098 nan 0.1000 0.0037
## 6 1.3026 nan 0.1000 0.0038
## 7 1.2954 nan 0.1000 0.0035
## 8 1.2901 nan 0.1000 0.0023
## 9 1.2831 nan 0.1000 0.0028
## 10 1.2765 nan 0.1000 0.0026
## 20 1.2360 nan 0.1000 0.0012
## 40 1.1939 nan 0.1000 0.0003
## 50 1.1788 nan 0.1000 0.0003
# Summary of model
# Score the held-out test partition with the tuned GBM (caret selected
# n.trees = 50, interaction.depth = 1 on the full training set above).
gbmpredict <- predict(gbmodel,newdata = test2GB)
endtime <- Sys.time()
# Wall-clock duration of the whole GBM tuning + prediction run
print(endtime-starttime)
## Time difference of 9.14641 mins
#plot(model2)
# Tuning profile of the caret train object.
# NOTE(review): print.thres and type="S" look like pROC plotting arguments;
# plot.train likely ignores them -- confirm they have any effect here.
plot(gbmodel, print.thres = 0.5, type="S")

# Test-set confusion matrix; X1 (CHD within ten years) is the positive class
caret::confusionMatrix(gbmpredict,test2GB$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 872 89
## X1 206 104
##
## Accuracy : 0.7679
## 95% CI : (0.7437, 0.7909)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2785
##
## Mcnemar's Test P-Value : 1.44e-11
##
## Sensitivity : 0.53886
## Specificity : 0.80891
## Pos Pred Value : 0.33548
## Neg Pred Value : 0.90739
## Precision : 0.33548
## Recall : 0.53886
## F1 : 0.41352
## Prevalence : 0.15185
## Detection Rate : 0.08183
## Detection Prevalence : 0.24390
## Balanced Accuracy : 0.67388
##
## 'Positive' Class : X1
##
# Re-score the GBM on its own training partition to gauge overfitting
# (compare these metrics against the test-set matrix above).
gbmpredict1 <- predict(gbmodel, newdata = train2GB)
caret::confusionMatrix(
  data = gbmpredict1,
  reference = train2GB$TenYearCHD,
  positive = "X1",
  mode = "everything"
)
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 1981 211
## X1 537 240
##
## Accuracy : 0.7481
## 95% CI : (0.732, 0.7636)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.2459
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.53215
## Specificity : 0.78674
## Pos Pred Value : 0.30888
## Neg Pred Value : 0.90374
## Precision : 0.30888
## Recall : 0.53215
## F1 : 0.39088
## Prevalence : 0.15190
## Detection Rate : 0.08084
## Detection Prevalence : 0.26170
## Balanced Accuracy : 0.65944
##
## 'Positive' Class : X1
##
#evalm(gbmodel)
J48 (C4.5 decision tree)
# Reproducible 70/30 stratified split on the outcome for the J48 model
set.seed(108)
sample2 <- createDataPartition(newdata2$TenYearCHD, p = 0.7, list = FALSE)
train2j48 <- newdata2[sample2, ]
test2j48 <- newdata2[-sample2, ]
# Rewrite factor levels as syntactically valid names (0/1 -> X0/X1),
# which caret requires when classProbs = TRUE
levels(train2j48$TenYearCHD) <- make.names(levels(train2j48$TenYearCHD))
levels(test2j48$TenYearCHD) <- make.names(levels(test2j48$TenYearCHD))
# Resampling scheme: 10-fold cross-validation repeated 3 times, with SMOTE
# applied inside each fold to counter the class imbalance. Class
# probabilities and per-fold predictions are kept so ROC-based metrics
# (twoClassSummary) and later evaluation are possible.
repeats <- 3
numbers <- 10
tunel <- 10
x <- trainControl(
  method = "repeatedcv",
  number = numbers,
  repeats = repeats,
  classProbs = TRUE,
  summaryFunction = twoClassSummary,
  sampling = "smote",
  verboseIter = FALSE,
  savePredictions = TRUE
)
starttime <- Sys.time()
#mtry <- sqrt(ncol(newdata2))
#tgrid <- expand.grid(maxdepth = 25)
# Fit a J48 (C4.5) decision tree via RWeka through caret, trying
# tuneLength = 10 candidate parameter sets and selecting on ROC using the
# repeated-CV + SMOTE control object `x` defined above.
J48model <- caret::train(TenYearCHD~., data = train2j48, method = "J48",
#
trControl = x,
metric = "ROC",
tuneLength = tunel)
#tunegrid=tgrid)
#ntree=5)
# Summary of model
# Predict classes for the held-out test partition
J48predict <- predict(J48model,newdata = test2j48)
endtime <- Sys.time()
# Wall-clock duration of the J48 tuning + prediction run
print(endtime-starttime)
## Time difference of 32.3574 mins
#plot(c5model, print.thres = 0.5, type="S")
# Model-specific variable importance from the fitted J48 tree (unscaled)
imp <- caret::varImp(J48model,useModel=TRUE,scale=FALSE)
plot(imp)

# Render the final selected J48 tree
plot(J48model$finalModel)

# plot(c5model$finalModel)
# text(c5model$finalModel)
#fancyRpartPlot(J48model$finalModel,palettes=c("Blues","Oranges"))
# Test-set confusion matrix for the J48 model; positive class is X1
caret::confusionMatrix(J48predict,test2j48$TenYearCHD, positive="X1",mode="everything")
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 813 109
## X1 265 84
##
## Accuracy : 0.7057
## 95% CI : (0.6798, 0.7307)
## No Information Rate : 0.8482
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.1422
##
## Mcnemar's Test P-Value : 1.103e-15
##
## Sensitivity : 0.43523
## Specificity : 0.75417
## Pos Pred Value : 0.24069
## Neg Pred Value : 0.88178
## Precision : 0.24069
## Recall : 0.43523
## F1 : 0.30996
## Prevalence : 0.15185
## Detection Rate : 0.06609
## Detection Prevalence : 0.27459
## Balanced Accuracy : 0.59470
##
## 'Positive' Class : X1
##
# Re-score the J48 tree on its own training partition to gauge overfitting
# (training metrics here are noticeably better than the test-set ones above).
J48predict1 <- predict(J48model, newdata = train2j48)
caret::confusionMatrix(
  data = J48predict1,
  reference = train2j48$TenYearCHD,
  positive = "X1",
  mode = "everything"
)
## Confusion Matrix and Statistics
##
## Reference
## Prediction X0 X1
## X0 2025 134
## X1 493 317
##
## Accuracy : 0.7888
## 95% CI : (0.7737, 0.8034)
## No Information Rate : 0.8481
## P-Value [Acc > NIR] : 1
##
## Kappa : 0.3822
##
## Mcnemar's Test P-Value : <2e-16
##
## Sensitivity : 0.7029
## Specificity : 0.8042
## Pos Pred Value : 0.3914
## Neg Pred Value : 0.9379
## Precision : 0.3914
## Recall : 0.7029
## F1 : 0.5028
## Prevalence : 0.1519
## Detection Rate : 0.1068
## Detection Prevalence : 0.2728
## Balanced Accuracy : 0.7535
##
## 'Positive' Class : X1
##
J485res <- evalm(J48model)
## ***MLeval: Machine Learning Model Evaluation in R***
## Input: caret train function object
## Averaging probs.
## Group 1 type: repeatedcv
## Observations: 2969
## Number of groups: 1
## Observations per group: 2969
## Positive: X1
## Negative: X0
## Group: Group 1
## Positive: 451
## Negative: 2518
## ***Performance Metrics***



## Group 1 Optimal Informedness = 0.289029409537362
## Group 1 AUC-ROC = 0.68
